Modified: hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java (original)
+++ hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java Tue Oct 14 19:06:45 2014
@@ -18,30 +18,47 @@
  */
 package org.apache.hive.hcatalog.pig;
 
+import com.google.common.collect.ImmutableSet;
+
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
-
-import junit.framework.TestCase;
+import java.util.Set;
 
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.io.IOConstants;
+import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.session.SessionState;
+
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.data.Pair;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 
-public class TestHCatStorerMulti extends TestCase {
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assume.assumeTrue;
+
+@RunWith(Parameterized.class)
+public class TestHCatStorerMulti {
   public static final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(
-          System.getProperty("user.dir") + "/build/test/data/" +
-                  TestHCatStorerMulti.class.getCanonicalName() + "-" + System.currentTimeMillis());
+      System.getProperty("user.dir") + "/build/test/data/" +
+          TestHCatStorerMulti.class.getCanonicalName() + "-" + System.currentTimeMillis());
   private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
   private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
 
@@ -51,9 +68,29 @@ public class TestHCatStorerMulti extends
 
   private static Map<Integer, Pair<Integer, String>> basicInputData;
 
-  protected String storageFormat() {
-    return "RCFILE tblproperties('hcat.isd'='org.apache.hive.hcatalog.rcfile.RCFileInputDriver'," +
-      "'hcat.osd'='org.apache.hive.hcatalog.rcfile.RCFileOutputDriver')";
+  private static final Map<String, Set<String>> DISABLED_STORAGE_FORMATS =
+      new HashMap<String, Set<String>>() {{
+        put(IOConstants.AVRO, new HashSet<String>() {{
+          add("testStoreBasicTable");
+          add("testStorePartitionedTable");
+          add("testStoreTableMulti");
+        }});
+        put(IOConstants.PARQUETFILE, new HashSet<String>() {{
+          add("testStoreBasicTable");
+          add("testStorePartitionedTable");
+          add("testStoreTableMulti");
+        }});
+      }};
+
+  private String storageFormat;
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> generateParameters() {
+    return StorageFormats.names();
+  }
+
+  public TestHCatStorerMulti(String storageFormat) {
+    this.storageFormat = storageFormat;
   }
 
   private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
@@ -66,7 +103,7 @@ public class TestHCatStorerMulti extends
     if ((partitionedBy != null) && (!partitionedBy.trim().isEmpty())) {
       createTable = createTable + "partitioned by (" + partitionedBy + ") ";
     }
-    createTable = createTable + "stored as " + storageFormat();
+    createTable = createTable + "stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table. [" + createTable + "], return code from hive driver : [" + retCode + "]");
@@ -77,8 +114,10 @@ public class TestHCatStorerMulti extends
     createTable(tablename, schema, null);
   }
 
-  @Override
-  protected void setUp() throws Exception {
+  @Before
+  public void setUp() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
+
     if (driver == null) {
       HiveConf hiveConf = new HiveConf(this.getClass());
       hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
@@ -92,14 +131,14 @@ public class TestHCatStorerMulti extends
     cleanup();
   }
 
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     cleanup();
   }
 
+  @Test
   public void testStoreBasicTable() throws Exception {
-
-
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     createTable(BASIC_TABLE, "a int, b string");
 
     populateBasicFile();
@@ -117,7 +156,9 @@ public class TestHCatStorerMulti extends
     assertEquals(basicInputData.size(), unpartitionedTableValuesReadFromHiveDriver.size());
   }
 
+  @Test
   public void testStorePartitionedTable() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     createTable(PARTITIONED_TABLE, "a int, b string", "bkt string");
 
     populateBasicFile();
@@ -139,9 +180,9 @@ public class TestHCatStorerMulti extends
     assertEquals(basicInputData.size(), partitionedTableValuesReadFromHiveDriver.size());
   }
 
+  @Test
   public void testStoreTableMulti() throws Exception {
-
-
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     createTable(BASIC_TABLE, "a int, b string");
     createTable(PARTITIONED_TABLE, "a int, b string", "bkt string");
 

Modified: hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java (original)
+++ hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java Tue Oct 14 19:06:45 2014
@@ -25,10 +25,13 @@ import java.util.Iterator;
 import java.util.UUID;
 
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
+
 import org.junit.Assert;
 import org.junit.Test;
 

Modified: hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigHCatUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigHCatUtil.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigHCatUtil.java (original)
+++ hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigHCatUtil.java Tue Oct 14 19:06:45 2014
@@ -20,14 +20,18 @@
 package org.apache.hive.hcatalog.pig;
 
 import com.google.common.collect.Lists;
+
 import junit.framework.Assert;
+
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
+
 import org.apache.pig.ResourceSchema;
 import org.apache.pig.ResourceSchema.ResourceFieldSchema;
 import org.apache.pig.data.DataType;
 import org.apache.pig.impl.util.UDFContext;
+
 import org.junit.Test;
 
 public class TestPigHCatUtil {

Modified: hive/branches/llap/hcatalog/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/pom.xml?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/pom.xml (original)
+++ hive/branches/llap/hcatalog/pom.xml Tue Oct 14 19:06:45 2014
@@ -46,6 +46,15 @@
     <module>streaming</module>
   </modules>
 
+  <dependencies>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <version>${mockito-all.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
   <profiles>
     <profile>
       <id>hadoop-1</id>

Modified: hive/branches/llap/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/NotificationListener.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/NotificationListener.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/NotificationListener.java (original)
+++ hive/branches/llap/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/NotificationListener.java Tue Oct 14 19:06:45 2014
@@ -214,7 +214,7 @@ public class NotificationListener extend
       HiveConf conf = handler.getHiveConf();
       Table newTbl;
       try {
-        newTbl = handler.get_table(tbl.getDbName(), tbl.getTableName())
+        newTbl = handler.get_table_core(tbl.getDbName(), tbl.getTableName())
           .deepCopy();
         newTbl.getParameters().put(
           HCatConstants.HCAT_MSGBUS_TOPIC_NAME,

Modified: hive/branches/llap/hcatalog/src/test/e2e/templeton/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/src/test/e2e/templeton/build.xml?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/src/test/e2e/templeton/build.xml (original)
+++ hive/branches/llap/hcatalog/src/test/e2e/templeton/build.xml Tue Oct 14 19:06:45 2014
@@ -111,6 +111,7 @@
             <env key="USER_NAME" value="${test.user.name}"/>
             <env key="HARNESS_CONF" value="${basedir}/conf/default.conf"/>
             <env key="SECURE_MODE" value="${secure.mode}"/>
+            <env key="USER_REALM" value="${user.realm}"/>
             <env key="HADOOP_VERSION" value="${hadoopversion}"/>
             <arg value="./test_harness.pl"/>
             <arg line="${tests.to.run}"/>
@@ -148,6 +149,7 @@
             <env key="OTHER_USER_NAME" value="${test.other.user.name}"/>
             <env key="HARNESS_CONF" value="${basedir}/conf/default.conf"/>
             <env key="SECURE_MODE" value="${secure.mode}"/>
+            <env key="USER_REALM" value="${user.realm}"/>
             <env key="KEYTAB_DIR" value="${keytab.dir}"/>
             <arg value="./test_harness.pl"/>
             <arg line="${tests.to.run}"/>
@@ -186,7 +188,9 @@
             <env key="USER_NAME" value="${test.user.name}"/>
             <env key="DOAS_USER" value="${doas.user}"/>
             <env key="HARNESS_CONF" value="${basedir}/conf/default.conf"/>
+            <env key="USER_REALM" value="${user.realm}"/>
             <env key="SECURE_MODE" value="${secure.mode}"/>
+            <env key="USER_REALM" value="${user.realm}"/>
             <arg value="./test_harness.pl"/>
             <arg line="${tests.to.run}"/>
             <arg value="${basedir}/tests/doas.conf"/>

Modified: hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml (original)
+++ hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml Tue Oct 14 19:06:45 2014
@@ -77,6 +77,11 @@
             shipped to the target node in the cluster to execute Pig job which uses
             HCat, Hive query, etc.</description>
     </property>
+    <property>
+        <name>templeton.sqoop.path</name>
+        <value>${env.SQOOP_HOME}/bin/sqoop</value>
+        <description>The path to the Sqoop executable.</description>
+    </property>
 
     <property>
         <name>templeton.controller.mr.child.opts</name>

Modified: hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/env.sh
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/env.sh?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/env.sh (original)
+++ hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/env.sh Tue Oct 14 19:06:45 2014
@@ -30,7 +30,7 @@ export PIG_VERSION=0.12.2-SNAPSHOT
 export PROJ_HOME=/Users/${USER}/dev/hive
 export HIVE_HOME=${PROJ_HOME}/packaging/target/apache-hive-${HIVE_VERSION}-bin/apache-hive-${HIVE_VERSION}-bin
 export HADOOP_HOME=/Users/${USER}/dev/hwxhadoop/hadoop-dist/target/hadoop-${HADOOP_VERSION}
-#export SQOOP_HOME=/
+export SQOOP_HOME=/Users/${USER}/dev/sqoop-1.4.4.bin__hadoop-2.0.4-alpha
 
 #Make sure Pig is built for the Hadoop version you are running
 export PIG_TAR_PATH=/Users/${USER}/dev/pig-${PIG_VERSION}-src/build

Modified: hive/branches/llap/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm (original)
+++ hive/branches/llap/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm Tue Oct 14 19:06:45 2014
@@ -193,6 +193,7 @@ sub globalSetup
     $globalHash->{'db_password'} = $ENV{'DB_PASSWORD'};
 
     $globalHash->{'is_secure_mode'} = $ENV{'SECURE_MODE'};
+    $globalHash->{'user_realm'} = $ENV{'USER_REALM'};
 
     # add libexec location to the path
     if (defined($ENV{'PATH'})) {
@@ -491,7 +492,14 @@ sub execCurlCmd(){
       } elsif(scalar @files > 1){
        die "More than one keytab file found for user $user_name in $keytab_dir";
       }
-      my @cmd = ('kinit', '-k', '-t', $files[0], $user_name);
+      my @cmd = ();
+      if (defined $testCmd->{'user_realm'}){
+          my $user_name_with_realm_name = $user_name.'@'.$testCmd->{'user_realm'};
+          @cmd = ('kinit', '-k', '-t', $files[0], $user_name_with_realm_name);
+      }
+      else{
+          @cmd = ('kinit', '-k', '-t', $files[0], $user_name);
+      }
       print $log "Command  @cmd";
       IPC::Run::run(\@cmd, \undef, $log, $log) or 
           die "Could not kinit as $user_name using " .  $files[0] . " $ERRNO";

Modified: hive/branches/llap/hcatalog/src/test/e2e/templeton/tests/doas.conf
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/src/test/e2e/templeton/tests/doas.conf?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/src/test/e2e/templeton/tests/doas.conf (original)
+++ hive/branches/llap/hcatalog/src/test/e2e/templeton/tests/doas.conf Tue Oct 14 19:06:45 2014
@@ -109,7 +109,7 @@ $cfg = 
      'method' => 'GET',
     'url' => ':TEMPLETON_URL:/templeton/v1/ddl/database/default/table/:UNAME:_doastab2/partition?user.name=:UNAME:&doAs=:DOAS:',
      'status_code' => 500,
-     'json_field_substr_match' => {'error' => 'FAILED: AuthorizationException java\.security\.AccessControlException: action READ not permitted on path .* for user :DOAS:'},
+     'json_field_substr_match' => {'error' => 'java\.security\.AccessControlException: Permission denied: user=:DOAS:, access=READ'},
     },
   
     {
@@ -118,7 +118,7 @@ $cfg = 
      'method' => 'DELETE',
     'url' => ':TEMPLETON_URL:/templeton/v1/ddl/database/default/table/:UNAME:_doastab2?user.name=:UNAME:&doAs=:DOAS:',
      'status_code' => 500,
-     'json_field_substr_match' => {'error' => 'java\.security\.AccessControlException: action WRITE not permitted on path .* for user :DOAS:'},
+     'json_field_substr_match' => {'error' => 'java\.security\.AccessControlException: Permission denied: user=:DOAS:, access=READ'},
     },
     {
              #descbe the table....

Modified: hive/branches/llap/hcatalog/src/test/e2e/templeton/tests/hcatperms.conf
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/src/test/e2e/templeton/tests/hcatperms.conf?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/src/test/e2e/templeton/tests/hcatperms.conf (original)
+++ hive/branches/llap/hcatalog/src/test/e2e/templeton/tests/hcatperms.conf Tue Oct 14 19:06:45 2014
@@ -375,6 +375,13 @@ $cfg = 
                  {
                   'method' => 'DELETE',
                   'format_header' => 'Content-Type: application/json',
+                  'url' => ':TEMPLETON_URL:/templeton/v1/ddl/database/hcatperms_:TNUM:/table/permstable_:TNUM:',
+                  'user_name' => ':UNAME_GROUP:',
+                  'status_code' => 200,
+                 },
+                 {
+                  'method' => 'DELETE',
+                  'format_header' => 'Content-Type: application/json',
                   'url' => ':TEMPLETON_URL:/templeton/v1/ddl/database/hcatperms_:TNUM:?ifExists=true&option=cascade',
                   'user_name' => ':UNAME:',
                   'status_code' => 200,
@@ -677,9 +684,7 @@ $cfg = 
       'format_header' => 'Content-Type: application/json', 
       'user_name' => ':UNAME_OTHER:',
       'status_code' => 500,
-     'json_field_substr_match' => {'error' => 'FAILED: AuthorizationException .*\.security\.AccessControlException: action READ not permitted on path .* for user :UNAME_OTHER:'},
-
-
+     'json_field_substr_match' => {'error' => 'AccessControlException: Permission denied: user=:UNAME_OTHER:, access=READ'},
     },
 
 

Modified: hive/branches/llap/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java (original)
+++ hive/branches/llap/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java Tue Oct 14 19:06:45 2014
@@ -240,6 +240,7 @@ public class HiveEndPoint {
     private final HiveEndPoint endPt;
     private final UserGroupInformation ugi;
     private final String username;
+    private final boolean secureMode;
 
     /**
      * @param endPoint end point to connect to
@@ -261,7 +262,8 @@ public class HiveEndPoint {
       if (conf==null) {
         conf = HiveEndPoint.createHiveConf(this.getClass(), endPoint.metaStoreUri);
       }
-      this.msClient = getMetaStoreClient(endPoint, conf);
+      this.secureMode = ugi==null ? false : ugi.hasKerberosCredentials();
+      this.msClient = getMetaStoreClient(endPoint, conf, secureMode);
       if (createPart  &&  !endPoint.partitionVals.isEmpty()) {
         createPartitionIfNotExists(endPoint, msClient, conf);
       }
@@ -425,13 +427,15 @@ public class HiveEndPoint {
       return buff.toString();
     }
 
-    private static IMetaStoreClient getMetaStoreClient(HiveEndPoint endPoint, HiveConf conf)
+    private static IMetaStoreClient getMetaStoreClient(HiveEndPoint endPoint, HiveConf conf, boolean secureMode)
             throws ConnectionError {
 
       if (endPoint.metaStoreUri!= null) {
         conf.setVar(HiveConf.ConfVars.METASTOREURIS, endPoint.metaStoreUri);
       }
-
+      if(secureMode) {
+        conf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL,true);
+      }
       try {
         return new HiveMetaStoreClient(conf);
       } catch (MetaException e) {

Modified: hive/branches/llap/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java (original)
+++ hive/branches/llap/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java Tue Oct 14 19:06:45 2014
@@ -168,7 +168,7 @@ public class HCatTable {
       newTable.setTableType(TableType.MANAGED_TABLE.toString());
     }
 
-    if (this.comment != null) {
+    if (StringUtils.isNotBlank(this.comment)) {
       newTable.putToParameters("comment", comment);
     }
 

Modified: hive/branches/llap/hcatalog/webhcat/svr/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/webhcat/svr/pom.xml?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/webhcat/svr/pom.xml (original)
+++ hive/branches/llap/hcatalog/webhcat/svr/pom.xml Tue Oct 14 19:06:45 2014
@@ -49,6 +49,11 @@
     <!-- inter-project -->
     <dependency>
       <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-core</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-json</artifactId>
       <version>${jersey.version}</version>
     </dependency>

Modified: hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java (original)
+++ hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java Tue Oct 14 19:06:45 2014
@@ -32,7 +32,7 @@ public class QueueStatusBean {
   public JobStatus status;
   public JobProfile profile;
 
-  public String id;
+  public final String id;
   public String parentId;
   public String percentComplete;
   public Long exitValue;
@@ -40,8 +40,11 @@ public class QueueStatusBean {
   public String callback;
   public String completed;
   public Map<String, Object> userargs;
+  public String msg;
 
-  public QueueStatusBean() {
+  public QueueStatusBean(String jobId, String errMsg) {
+    this.id = jobId;
+    this.msg = errMsg;
   }
 
   /**

Modified: hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java (original)
+++ hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java Tue Oct 14 19:06:45 2014
@@ -1008,8 +1008,15 @@ public class Server {
       jobItem.id = job;
       if (showDetails) {
         StatusDelegator sd = new StatusDelegator(appConf);
-        QueueStatusBean statusBean = sd.run(getDoAsUser(), job);
-        jobItem.detail = statusBean;
+        try {
+          jobItem.detail = sd.run(getDoAsUser(), job);
+        }
+        catch(Exception ex) {
+          /*if we could not get status for some reason, log it, and send empty status back with
+          * just the ID so that caller knows to even look in the log file*/
+          LOG.info("Failed to get status detail for jobId='" + job + "'", ex);
+          jobItem.detail = new QueueStatusBean(job, "Failed to retrieve status; see WebHCat logs");
+        }
       }
       detailList.add(jobItem);
     }

Modified: hive/branches/llap/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java (original)
+++ hive/branches/llap/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java Tue Oct 14 19:06:45 2014
@@ -24,6 +24,7 @@ import java.net.URISyntaxException;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.shims.HadoopShimsSecure;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.util.StringUtils;
@@ -314,11 +315,11 @@ public class TestTempletonUtils {
 
   @Test
   public void testFindContainingJar() throws Exception {
-    String result = TempletonUtils.findContainingJar(ShimLoader.class, ".*hive-shims.*");
+    String result = TempletonUtils.findContainingJar(Configuration.class, ".*hadoop.*\\.jar.*");
     Assert.assertNotNull(result);
-    result = TempletonUtils.findContainingJar(HadoopShimsSecure.class, ".*hive-shims.*");
+    result = TempletonUtils.findContainingJar(FileSystem.class, ".*hadoop.*\\.jar.*");
     Assert.assertNotNull(result);
     result = TempletonUtils.findContainingJar(HadoopShimsSecure.class, ".*unknownjar.*");
-    Assert.assertNull(result);
+    Assert.assertNull("unexpectedly found jar for HadoopShimsSecure class: " + result, result);
   }
 }

Modified: hive/branches/llap/itests/hive-minikdc/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-minikdc/pom.xml?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-minikdc/pom.xml (original)
+++ hive/branches/llap/itests/hive-minikdc/pom.xml Tue Oct 14 19:06:45 2014
@@ -60,6 +60,13 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
+      <artifactId>hive-it-unit</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
       <artifactId>hive-jdbc</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>

Modified: hive/branches/llap/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java (original)
+++ hive/branches/llap/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java Tue Oct 14 19:06:45 2014
@@ -28,9 +28,9 @@ import junit.framework.Assert;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
-import org.apache.hadoop.hive.ql.hooks.HookContext;
-import org.apache.hadoop.hive.ql.hooks.HookContext.HookType;
+import org.apache.hadoop.hive.hooks.TestHs2Hooks.PostExecHook;
+import org.apache.hadoop.hive.hooks.TestHs2Hooks.PreExecHook;
+import org.apache.hadoop.hive.hooks.TestHs2Hooks.SemanticAnalysisHook;
 import org.apache.hive.jdbc.miniHS2.MiniHS2;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -42,43 +42,18 @@ import org.junit.Test;
  * Tests information retrieved from hooks, in Kerberos mode.
  */
 public class TestHs2HooksWithMiniKdc {
-  public static class PostExecHook implements ExecuteWithHookContext {
-    public static String userName = null;
-    public static String ipAddress = null;
-
-    public void run(HookContext hookContext) {
-      if (hookContext.getHookType().equals(HookType.POST_EXEC_HOOK)) {
-        Assert.assertNotNull(hookContext.getIpAddress(), "IP Address is null");
-        ipAddress = hookContext.getIpAddress();
-        Assert.assertNotNull(hookContext.getUserName(), "Username is null");
-        userName = hookContext.getUserName();
-      }
-    }
-  }
-
-  public static class PreExecHook implements ExecuteWithHookContext {
-    public static String userName = null;
-    public static String ipAddress = null;
-
-    public void run(HookContext hookContext) {
-      if (hookContext.getHookType().equals(HookType.PRE_EXEC_HOOK)) {
-        Assert.assertNotNull(hookContext.getIpAddress(), "IP Address is null");
-        ipAddress = hookContext.getIpAddress();
-        Assert.assertNotNull(hookContext.getUserName(), "Username is null");
-        userName = hookContext.getUserName();
-      }
-    }
-  }
   private static MiniHS2 miniHS2 = null;
   private static MiniHiveKdc miniHiveKdc = null;
   private static Map<String, String> confOverlay = new HashMap<String, String>();
   private Connection hs2Conn;
 
   @BeforeClass
-  public static void beforeTest() throws Exception {
+  public static void setUpBeforeClass() throws Exception {
     Class.forName(MiniHS2.getJdbcDriverName());
     confOverlay.put(ConfVars.POSTEXECHOOKS.varname, PostExecHook.class.getName());
     confOverlay.put(ConfVars.PREEXECHOOKS.varname, PreExecHook.class.getName());
+    confOverlay.put(ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
+        SemanticAnalysisHook.class.getName());
 
     HiveConf hiveConf = new HiveConf();
     miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
@@ -86,12 +61,30 @@ public class TestHs2HooksWithMiniKdc {
     miniHS2.start(confOverlay);
   }
 
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    miniHS2.stop();
+  }
+
   @Before
-  public void setUp() throws Exception {
+  public void setUpTest() throws Exception {
+    PreExecHook.userName = null;
+    PreExecHook.ipAddress = null;
+    PreExecHook.operation = null;
+    PreExecHook.error = null;
+    PostExecHook.userName = null;
+    PostExecHook.ipAddress = null;
+    PostExecHook.operation = null;
+    PostExecHook.error = null;
+    SemanticAnalysisHook.userName = null;
+    SemanticAnalysisHook.ipAddress = null;
+    SemanticAnalysisHook.command = null;
+    SemanticAnalysisHook.preAnalyzeError = null;
+    SemanticAnalysisHook.postAnalyzeError = null;
   }
 
   @After
-  public void tearDown() throws Exception {
+  public void tearDownTest() throws Exception {
     if (hs2Conn != null) {
       try {
         hs2Conn.close();
@@ -101,29 +94,57 @@ public class TestHs2HooksWithMiniKdc {
     }
   }
 
-  @AfterClass
-  public static void afterTest() throws Exception {
-    miniHS2.stop();
-  }
-
   /**
-   * Test get IpAddress and username from hook.
-   * @throws Exception
+   * Test that hook context properties are correctly set.
    */
   @Test
-  public void testIpUserName() throws Exception {
+  public void testHookContexts() throws Throwable {
     miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_USER_1);
     hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL());
 
     Statement stmt = hs2Conn.createStatement();
+    stmt.executeQuery("show databases");
     stmt.executeQuery("show tables");
+    Throwable error = PostExecHook.error;
+    if (error != null) {
+      throw error;
+    }
+    error = PreExecHook.error;
+    if (error != null) {
+      throw error;
+    }
 
+    Assert.assertNotNull(PostExecHook.ipAddress, "ipaddress is null");
+    Assert.assertNotNull(PostExecHook.userName, "userName is null");
+    Assert.assertNotNull(PostExecHook.operation , "operation is null");
     Assert.assertEquals(MiniHiveKdc.HIVE_TEST_USER_1, PostExecHook.userName);
-    Assert.assertNotNull(PostExecHook.ipAddress);
-    Assert.assertTrue(PostExecHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertTrue(PostExecHook.ipAddress, PostExecHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertEquals("SHOWTABLES", PostExecHook.operation);
 
+    Assert.assertNotNull(PreExecHook.ipAddress, "ipaddress is null");
+    Assert.assertNotNull(PreExecHook.userName, "userName is null");
+    Assert.assertNotNull(PreExecHook.operation , "operation is null");
     Assert.assertEquals(MiniHiveKdc.HIVE_TEST_USER_1, PreExecHook.userName);
-    Assert.assertNotNull(PreExecHook.ipAddress);
-    Assert.assertTrue(PreExecHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertTrue(PreExecHook.ipAddress, PreExecHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertEquals("SHOWTABLES", PreExecHook.operation);
+
+    error = SemanticAnalysisHook.preAnalyzeError;
+    if (error != null) {
+      throw error;
+    }
+    error = SemanticAnalysisHook.postAnalyzeError;
+    if (error != null) {
+      throw error;
+    }
+
+    Assert.assertNotNull(SemanticAnalysisHook.ipAddress,
+        "semantic hook context ipaddress is null");
+    Assert.assertNotNull(SemanticAnalysisHook.userName,
+        "semantic hook context userName is null");
+    Assert.assertNotNull(SemanticAnalysisHook.command ,
+        "semantic hook context command is null");
+    Assert.assertTrue(SemanticAnalysisHook.ipAddress,
+        SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertEquals("show tables", SemanticAnalysisHook.command);
   }
-}
+}
\ No newline at end of file

Modified: hive/branches/llap/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestExtendedAcls.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestExtendedAcls.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestExtendedAcls.java (original)
+++ hive/branches/llap/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestExtendedAcls.java Tue Oct 14 19:06:45 2014
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.junit.Assert;
 import org.junit.BeforeClass;
 
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 
 public class TestExtendedAcls extends FolderPermissionBase {
@@ -46,7 +47,7 @@ public class TestExtendedAcls extends Fo
     baseSetup();
   }
 
-  List<AclEntry> aclSpec1 = Lists.newArrayList(
+  private final ImmutableList<AclEntry> aclSpec1 = ImmutableList.of(
       aclEntry(ACCESS, USER, FsAction.ALL),
       aclEntry(ACCESS, GROUP, FsAction.ALL),
       aclEntry(ACCESS, OTHER, FsAction.ALL),
@@ -55,7 +56,7 @@ public class TestExtendedAcls extends Fo
       aclEntry(ACCESS, GROUP, "bar", FsAction.READ_WRITE),
       aclEntry(ACCESS, GROUP, "foo", FsAction.READ_EXECUTE));
 
-  List<AclEntry> aclSpec2 = Lists.newArrayList(
+  private final ImmutableList<AclEntry> aclSpec2 = ImmutableList.of(
       aclEntry(ACCESS, USER, FsAction.ALL),
       aclEntry(ACCESS, GROUP, FsAction.ALL),
       aclEntry(ACCESS, OTHER, FsAction.READ_EXECUTE),
@@ -83,20 +84,20 @@ public class TestExtendedAcls extends Fo
     switch (permIndex) {
       case 0:
         FsPermission perm = fs.getFileStatus(new Path(locn)).getPermission();
-        Assert.assertEquals(perm.toString(), "rwxrwxrwx");
+        Assert.assertEquals("Location: " + locn, "rwxrwxrwx", String.valueOf(perm));
 
         List<AclEntry> actual = getAcl(locn);
         verifyAcls(aclSpec1, actual);
         break;
       case 1:
         perm = fs.getFileStatus(new Path(locn)).getPermission();
-        Assert.assertEquals(perm.toString(), "rwxrwxr-x");
+        Assert.assertEquals("Location: " + locn, "rwxrwxr-x", String.valueOf(perm));
 
         List<AclEntry> acls = getAcl(locn);
         verifyAcls(aclSpec2, acls);
         break;
       default:
-        throw new RuntimeException("Only 2 permissions by this test");
+        throw new RuntimeException("Only 2 permissions by this test: " + permIndex);
     }
   }
 

Modified: hive/branches/llap/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java (original)
+++ hive/branches/llap/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java Tue Oct 14 19:06:45 2014
@@ -32,6 +32,7 @@ import org.apache.hadoop.fs.permission.F
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
 import org.apache.hadoop.hive.shims.HadoopShims.MiniMrShim;
 import org.apache.hadoop.hive.shims.ShimLoader;
@@ -50,6 +51,7 @@ public class MiniHS2 extends AbstractHiv
   public static final String HS2_HTTP_MODE = "http";
   private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
   private static final FsPermission FULL_PERM = new FsPermission((short)00777);
+  private static final FsPermission WRITE_ALL_PERM = new FsPermission((short)00733);
   private HiveServer2 hiveServer2 = null;
   private final File baseDir;
   private final Path baseDfsDir;
@@ -200,9 +202,8 @@ public class MiniHS2 extends AbstractHiv
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, getHttpPort());
 
     Path scratchDir = new Path(baseDfsDir, "scratch");
-
-    // Create scratchdir with 777, so that user impersonation has no issues.
-    FileSystem.mkdirs(fs, scratchDir, FULL_PERM);
+    // Create root scratchdir with write all, so that user impersonation has no issues.
+    Utilities.createDirsWithPermission(hiveConf, scratchDir, WRITE_ALL_PERM, true);
     System.setProperty(HiveConf.ConfVars.SCRATCHDIR.varname, scratchDir.toString());
     hiveConf.setVar(ConfVars.SCRATCHDIR, scratchDir.toString());
 

Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java Tue Oct 14 19:06:45 2014
@@ -19,6 +19,10 @@
//The tests here are heavily based on some timing, so there is some chance to fail.
 package org.apache.hadoop.hive.hooks;
 
+import java.io.Serializable;
+import java.lang.Override;
+import java.sql.Statement;
+import java.util.List;
 import java.util.Properties;
 
 import junit.framework.Assert;
@@ -27,43 +31,98 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
 import org.apache.hadoop.hive.ql.hooks.HookContext;
 import org.apache.hadoop.hive.ql.hooks.HookContext.HookType;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHook;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hive.jdbc.HiveConnection;
 import org.apache.hive.service.server.HiveServer2;
 import org.junit.AfterClass;
+import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Tests information retrieved from hooks.
  */
 public class TestHs2Hooks {
-
+  private static final Logger LOG = LoggerFactory.getLogger(TestHs2Hooks.class);
   private static HiveServer2 hiveServer2;
 
-  public static class PreExecHook implements ExecuteWithHookContext {
-    public static String userName = null;
-    public static String ipAddress = null;
+  public static class PostExecHook implements ExecuteWithHookContext {
+    public static String userName;
+    public static String ipAddress;
+    public static String operation;
+    public static Throwable error;
 
     public void run(HookContext hookContext) {
-      if (hookContext.getHookType().equals(HookType.PRE_EXEC_HOOK)) {
-        Assert.assertNotNull(hookContext.getIpAddress(), "IP Address is null");
-        ipAddress = hookContext.getIpAddress();
-        Assert.assertNotNull(hookContext.getUserName(), "Username is null");
-        userName = hookContext.getUserName();
+      try {
+        if (hookContext.getHookType().equals(HookType.POST_EXEC_HOOK)) {
+          ipAddress = hookContext.getIpAddress();
+          userName = hookContext.getUserName();
+          operation = hookContext.getOperationName();
+        }
+      } catch (Throwable t) {
+        LOG.error("Error in PostExecHook: " + t, t);
+        error = t;
       }
     }
   }
 
-  public static class PostExecHook implements ExecuteWithHookContext {
-    public static String userName = null;
-    public static String ipAddress = null;
+  public static class PreExecHook implements ExecuteWithHookContext {
+    public static String userName;
+    public static String ipAddress;
+    public static String operation;
+    public static Throwable error;
 
     public void run(HookContext hookContext) {
-      if (hookContext.getHookType().equals(HookType.POST_EXEC_HOOK)) {
-        Assert.assertNotNull(hookContext.getIpAddress(), "IP Address is null");
-        ipAddress = hookContext.getIpAddress();
-        Assert.assertNotNull(hookContext.getUserName(), "Username is null");
-        userName = hookContext.getUserName();
+      try {
+        if (hookContext.getHookType().equals(HookType.PRE_EXEC_HOOK)) {
+          ipAddress = hookContext.getIpAddress();
+          userName = hookContext.getUserName();
+          operation = hookContext.getOperationName();
+        }
+      } catch (Throwable t) {
+        LOG.error("Error in PreExecHook: " + t, t);
+        error = t;
+      }
+    }
+  }
+
+  public static class SemanticAnalysisHook implements HiveSemanticAnalyzerHook {
+    public static String userName;
+    public static String command;
+    public static String ipAddress;
+    public static Throwable preAnalyzeError;
+    public static Throwable postAnalyzeError;
+
+    @Override
+    public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context,
+        ASTNode ast) throws SemanticException {
+      try {
+        userName = context.getUserName();
+        ipAddress = context.getIpAddress();
+        command = context.getCommand();
+      } catch (Throwable t) {
+        LOG.error("Error in semantic analysis hook preAnalyze: " + t, t);
+        preAnalyzeError = t;
+      }
+      return ast;
+    }
+
+    @Override
+    public void postAnalyze(HiveSemanticAnalyzerHookContext context,
+        List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+      try {
+        userName = context.getUserName();
+        ipAddress = context.getIpAddress();
+        command = context.getCommand();
+      } catch (Throwable t) {
+        LOG.error("Error in semantic analysis hook postAnalyze: " + t, t);
+        postAnalyzeError = t;
       }
     }
   }
@@ -78,6 +137,8 @@ public class TestHs2Hooks {
         PreExecHook.class.getName());
     hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS,
         PostExecHook.class.getName());
+    hiveConf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK,
+        SemanticAnalysisHook.class.getName());
 
     hiveServer2 = new HiveServer2();
     hiveServer2.init(hiveConf);
@@ -92,28 +153,75 @@ public class TestHs2Hooks {
     }
   }
 
+  @Before
+  public void setUpTest() throws Exception {
+    PreExecHook.userName = null;
+    PreExecHook.ipAddress = null;
+    PreExecHook.operation = null;
+    PreExecHook.error = null;
+    PostExecHook.userName = null;
+    PostExecHook.ipAddress = null;
+    PostExecHook.operation = null;
+    PostExecHook.error = null;
+    SemanticAnalysisHook.userName = null;
+    SemanticAnalysisHook.ipAddress = null;
+    SemanticAnalysisHook.command = null;
+    SemanticAnalysisHook.preAnalyzeError = null;
+    SemanticAnalysisHook.postAnalyzeError = null;
+  }
+
   /**
-   * Test get IpAddress and username from hook.
-   * @throws Exception
+   * Test that hook context properties are correctly set.
    */
   @Test
-  public void testIpUserName() throws Exception {
+  public void testHookContexts() throws Throwable {
     Properties connProp = new Properties();
     connProp.setProperty("user", System.getProperty("user.name"));
     connProp.setProperty("password", "");
     HiveConnection connection = new HiveConnection("jdbc:hive2://localhost:10000/default", connProp);
-
-    connection.createStatement().execute("show tables");
+    Statement stmt = connection.createStatement();
+    stmt.executeQuery("show databases");
+    stmt.executeQuery("show tables");
+    Throwable error = PostExecHook.error;
+    if (error != null) {
+      throw error;
+    }
+    error = PreExecHook.error;
+    if (error != null) {
+      throw error;
+    }
 
     Assert.assertEquals(System.getProperty("user.name"), PostExecHook.userName);
-    Assert.assertNotNull(PostExecHook.ipAddress);
-    Assert.assertTrue(PostExecHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertNotNull(PostExecHook.ipAddress, "ipaddress is null");
+    Assert.assertNotNull(PostExecHook.userName, "userName is null");
+    Assert.assertNotNull(PostExecHook.operation , "operation is null");
+    Assert.assertTrue(PostExecHook.ipAddress, PostExecHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertEquals("SHOWTABLES", PostExecHook.operation);
 
     Assert.assertEquals(System.getProperty("user.name"), PreExecHook.userName);
-    Assert.assertNotNull(PreExecHook.ipAddress);
-    Assert.assertTrue(PreExecHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertNotNull(PreExecHook.ipAddress, "ipaddress is null");
+    Assert.assertNotNull(PreExecHook.userName, "userName is null");
+    Assert.assertNotNull(PreExecHook.operation , "operation is null");
+    Assert.assertTrue(PreExecHook.ipAddress, PreExecHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertEquals("SHOWTABLES", PreExecHook.operation);
+
+    error = SemanticAnalysisHook.preAnalyzeError;
+    if (error != null) {
+      throw error;
+    }
+    error = SemanticAnalysisHook.postAnalyzeError;
+    if (error != null) {
+      throw error;
+    }
 
-    connection.close();
+    Assert.assertNotNull(SemanticAnalysisHook.ipAddress,
+        "semantic hook context ipaddress is null");
+    Assert.assertNotNull(SemanticAnalysisHook.userName,
+        "semantic hook context userName is null");
+    Assert.assertNotNull(SemanticAnalysisHook.command ,
+        "semantic hook context command is null");
+    Assert.assertTrue(SemanticAnalysisHook.ipAddress,
+        SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertEquals("show tables", SemanticAnalysisHook.command);
   }
 }
-

Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Tue Oct 14 19:06:45 2014
@@ -1115,6 +1115,12 @@ public class TestJdbcDriver extends Test
 
   }
 
+  public void testInvalidUrl() throws SQLException {
+    HiveDriver driver = new HiveDriver();
+
+    assertNull(driver.connect("jdbc:hive2://localhost:1000", null));
+  }
+
   private static void assertDpi(DriverPropertyInfo dpi, String name,
       String value) {
     assertEquals("Invalid DriverPropertyInfo name", name, dpi.name);

Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java Tue Oct 14 19:06:45 2014
@@ -76,6 +76,21 @@ public class TestHiveMetaStoreTxns {
   }
 
   @Test
+  public void testOpenTxnNotExcluded() throws Exception {
+    List<Long> tids = client.openTxns("me", 3).getTxn_ids();
+    Assert.assertEquals(1L, (long) tids.get(0));
+    Assert.assertEquals(2L, (long) tids.get(1));
+    Assert.assertEquals(3L, (long) tids.get(2));
+    client.rollbackTxn(1);
+    client.commitTxn(2);
+    ValidTxnList validTxns = client.getValidTxns(3);
+    Assert.assertFalse(validTxns.isTxnCommitted(1));
+    Assert.assertTrue(validTxns.isTxnCommitted(2));
+    Assert.assertTrue(validTxns.isTxnCommitted(3));
+    Assert.assertFalse(validTxns.isTxnCommitted(4));
+  }
+
+  @Test
   public void testTxnRange() throws Exception {
     ValidTxnList validTxns = client.getValidTxns();
     Assert.assertEquals(ValidTxnList.RangeResponse.NONE,

Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java Tue Oct 14 19:06:45 2014
@@ -193,43 +193,39 @@ public class TestMetaStoreEventListener 
 
     driver.run("create database " + dbName);
     listSize++;
+    PreCreateDatabaseEvent preDbEvent = (PreCreateDatabaseEvent)(preNotifyList.get(preNotifyList.size() - 1));
     Database db = msc.getDatabase(dbName);
     assertEquals(listSize, notifyList.size());
-    assertEquals(listSize, preNotifyList.size());
+    assertEquals(listSize + 1, preNotifyList.size());
+    validateCreateDb(db, preDbEvent.getDatabase());
 
     CreateDatabaseEvent dbEvent = (CreateDatabaseEvent)(notifyList.get(listSize - 1));
     assert dbEvent.getStatus();
     validateCreateDb(db, dbEvent.getDatabase());
 
-    PreCreateDatabaseEvent preDbEvent = (PreCreateDatabaseEvent)(preNotifyList.get(listSize - 1));
-    validateCreateDb(db, preDbEvent.getDatabase());
 
     driver.run("use " + dbName);
     driver.run(String.format("create table %s (a string) partitioned by (b string)", tblName));
+    PreCreateTableEvent preTblEvent = (PreCreateTableEvent)(preNotifyList.get(preNotifyList.size() - 1));
     listSize++;
     Table tbl = msc.getTable(dbName, tblName);
+    validateCreateTable(tbl, preTblEvent.getTable());
     assertEquals(notifyList.size(), listSize);
-    assertEquals(preNotifyList.size(), listSize);
 
     CreateTableEvent tblEvent = (CreateTableEvent)(notifyList.get(listSize - 1));
     assert tblEvent.getStatus();
     validateCreateTable(tbl, tblEvent.getTable());
 
-    PreCreateTableEvent preTblEvent = (PreCreateTableEvent)(preNotifyList.get(listSize - 1));
-    validateCreateTable(tbl, preTblEvent.getTable());
-
     driver.run("alter table tmptbl add partition (b='2011')");
     listSize++;
-    Partition part = msc.getPartition("hive2038", "tmptbl", "b=2011");
     assertEquals(notifyList.size(), listSize);
-    assertEquals(preNotifyList.size(), listSize);
+    PreAddPartitionEvent prePartEvent = (PreAddPartitionEvent)(preNotifyList.get(preNotifyList.size() - 1));
 
     AddPartitionEvent partEvent = (AddPartitionEvent)(notifyList.get(listSize-1));
     assert partEvent.getStatus();
+    Partition part = msc.getPartition("hive2038", "tmptbl", "b=2011");
     validateAddPartition(part, partEvent.getPartitions().get(0));
     validateTableInAddPartition(tbl, partEvent.getTable());
-
-    PreAddPartitionEvent prePartEvent = (PreAddPartitionEvent)(preNotifyList.get(listSize-1));
     validateAddPartition(part, prePartEvent.getPartitions().get(0));
 
     // Test adding multiple partitions in a single partition-set, atomically.
@@ -254,7 +250,8 @@ public class TestMetaStoreEventListener 
     driver.run(String.format("alter table %s touch partition (%s)", tblName, "b='2011'"));
     listSize++;
     assertEquals(notifyList.size(), listSize);
-    assertEquals(preNotifyList.size(), listSize);
+    PreAlterPartitionEvent preAlterPartEvent =
+        (PreAlterPartitionEvent)preNotifyList.get(preNotifyList.size() - 1);
 
     //the partition did not change,
     // so the new partition should be similar to the original partition
@@ -266,40 +263,39 @@ public class TestMetaStoreEventListener 
         alterPartEvent.getOldPartition().getTableName(),
         alterPartEvent.getOldPartition().getValues(), alterPartEvent.getNewPartition());
 
-    PreAlterPartitionEvent preAlterPartEvent =
-        (PreAlterPartitionEvent)preNotifyList.get(listSize - 1);
+
     validateAlterPartition(origP, origP, preAlterPartEvent.getDbName(),
         preAlterPartEvent.getTableName(), preAlterPartEvent.getNewPartition().getValues(),
         preAlterPartEvent.getNewPartition());
 
     List<String> part_vals = new ArrayList<String>();
     part_vals.add("c=2012");
+    int preEventListSize;
+    preEventListSize = preNotifyList.size() + 1;
     Partition newPart = msc.appendPartition(dbName, tblName, part_vals);
 
     listSize++;
     assertEquals(notifyList.size(), listSize);
-    assertEquals(preNotifyList.size(), listSize);
+    assertEquals(preNotifyList.size(), preEventListSize);
 
     AddPartitionEvent appendPartEvent =
         (AddPartitionEvent)(notifyList.get(listSize-1));
     validateAddPartition(newPart, appendPartEvent.getPartitions().get(0));
 
     PreAddPartitionEvent preAppendPartEvent =
-        (PreAddPartitionEvent)(preNotifyList.get(listSize-1));
+        (PreAddPartitionEvent)(preNotifyList.get(preNotifyList.size() - 1));
     validateAddPartition(newPart, preAppendPartEvent.getPartitions().get(0));
 
     driver.run(String.format("alter table %s rename to %s", tblName, renamed));
     listSize++;
     assertEquals(notifyList.size(), listSize);
-    assertEquals(preNotifyList.size(), listSize);
+    PreAlterTableEvent preAlterTableE = (PreAlterTableEvent) preNotifyList.get(preNotifyList.size() - 1);
 
     Table renamedTable = msc.getTable(dbName, renamed);
 
     AlterTableEvent alterTableE = (AlterTableEvent) notifyList.get(listSize-1);
     assert alterTableE.getStatus();
     validateAlterTable(tbl, renamedTable, alterTableE.getOldTable(), alterTableE.getNewTable());
-
-    PreAlterTableEvent preAlterTableE = (PreAlterTableEvent) preNotifyList.get(listSize-1);
     validateAlterTable(tbl, renamedTable, preAlterTableE.getOldTable(),
         preAlterTableE.getNewTable());
 
@@ -307,20 +303,17 @@ public class TestMetaStoreEventListener 
     driver.run(String.format("alter table %s rename to %s", renamed, tblName));
     listSize++;
     assertEquals(notifyList.size(), listSize);
-    assertEquals(preNotifyList.size(), listSize);
 
     driver.run(String.format("alter table %s ADD COLUMNS (c int)", tblName));
     listSize++;
     assertEquals(notifyList.size(), listSize);
-    assertEquals(preNotifyList.size(), listSize);
+    preAlterTableE = (PreAlterTableEvent) preNotifyList.get(preNotifyList.size() - 1);
 
     Table altTable = msc.getTable(dbName, tblName);
 
     alterTableE = (AlterTableEvent) notifyList.get(listSize-1);
     assert alterTableE.getStatus();
     validateAlterTableColumns(tbl, altTable, alterTableE.getOldTable(), alterTableE.getNewTable());
-
-    preAlterTableE = (PreAlterTableEvent) preNotifyList.get(listSize-1);
     validateAlterTableColumns(tbl, altTable, preAlterTableE.getOldTable(),
         preAlterTableE.getNewTable());
 
@@ -329,7 +322,6 @@ public class TestMetaStoreEventListener 
     msc.markPartitionForEvent("hive2038", "tmptbl", kvs, PartitionEventType.LOAD_DONE);
     listSize++;
     assertEquals(notifyList.size(), listSize);
-    assertEquals(preNotifyList.size(), listSize);
 
     LoadPartitionDoneEvent partMarkEvent = (LoadPartitionDoneEvent)notifyList.get(listSize - 1);
     assert partMarkEvent.getStatus();
@@ -337,46 +329,42 @@ public class TestMetaStoreEventListener 
         partMarkEvent.getPartitionName());
 
     PreLoadPartitionDoneEvent prePartMarkEvent =
-        (PreLoadPartitionDoneEvent)preNotifyList.get(listSize - 1);
+        (PreLoadPartitionDoneEvent)preNotifyList.get(preNotifyList.size() - 1);
     validateLoadPartitionDone("tmptbl", kvs, prePartMarkEvent.getTableName(),
         prePartMarkEvent.getPartitionName());
 
     driver.run(String.format("alter table %s drop partition (b='2011')", tblName));
     listSize++;
     assertEquals(notifyList.size(), listSize);
-    assertEquals(preNotifyList.size(), listSize);
+    PreDropPartitionEvent preDropPart = (PreDropPartitionEvent) preNotifyList.get(preNotifyList
+        .size() - 1);
 
     DropPartitionEvent dropPart = (DropPartitionEvent)notifyList.get(listSize - 1);
     assert dropPart.getStatus();
     validateDropPartition(part, dropPart.getPartition());
     validateTableInDropPartition(tbl, dropPart.getTable());
 
-    PreDropPartitionEvent preDropPart = (PreDropPartitionEvent)preNotifyList.get(listSize - 1);
     validateDropPartition(part, preDropPart.getPartition());
     validateTableInDropPartition(tbl, preDropPart.getTable());
 
     driver.run("drop table " + tblName);
     listSize++;
     assertEquals(notifyList.size(), listSize);
-    assertEquals(preNotifyList.size(), listSize);
+    PreDropTableEvent preDropTbl = (PreDropTableEvent)preNotifyList.get(preNotifyList.size() - 1);
 
     DropTableEvent dropTbl = (DropTableEvent)notifyList.get(listSize-1);
     assert dropTbl.getStatus();
     validateDropTable(tbl, dropTbl.getTable());
-
-    PreDropTableEvent preDropTbl = (PreDropTableEvent)preNotifyList.get(listSize-1);
     validateDropTable(tbl, preDropTbl.getTable());
 
     driver.run("drop database " + dbName);
     listSize++;
     assertEquals(notifyList.size(), listSize);
-    assertEquals(preNotifyList.size(), listSize);
+    PreDropDatabaseEvent preDropDB = (PreDropDatabaseEvent)preNotifyList.get(preNotifyList.size() - 1);
 
     DropDatabaseEvent dropDB = (DropDatabaseEvent)notifyList.get(listSize-1);
     assert dropDB.getStatus();
     validateDropDb(db, dropDB.getDatabase());
-
-    PreDropDatabaseEvent preDropDB = (PreDropDatabaseEvent)preNotifyList.get(listSize-1);
     validateDropDb(db, preDropDB.getDatabase());
 
     SetProcessor.setVariable("metaconf:hive.metastore.try.direct.sql", "false");

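The TestMetaStoreEventListener hunks above all make the same change: exact-size assertions on preNotifyList are dropped, because pre-events now fire for metastore reads (msc.getTable, msc.getPartition) as well as for DDL, so only the post-event list keeps a predictable length. Each pre-event is instead captured from the tail of the list immediately after the statement that produced it. A minimal sketch of the resulting pattern, using the notifyList/preNotifyList recorded by this test's dummy listeners:

    driver.run("drop table " + tblName);
    listSize++;
    // Post-events still arrive exactly once per DDL, so this count stays exact.
    assertEquals(notifyList.size(), listSize);
    // Pre-events also fire for reads, so index from the end of the list
    // rather than by listSize.
    PreDropTableEvent preDropTbl =
        (PreDropTableEvent) preNotifyList.get(preNotifyList.size() - 1);
    validateDropTable(tbl, preDropTbl.getTable());
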
Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java Tue Oct 14 19:06:45 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.metastore
 import junit.framework.TestCase;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.util.StringUtils;
@@ -49,6 +50,8 @@ public class TestRemoteHiveMetaStoreIpAd
 
     int port = MetaStoreUtils.findFreePort();
     System.out.println("Starting MetaStore Server on port " + port);
+    System.setProperty(ConfVars.METASTORE_EVENT_LISTENERS.varname,
+        IpAddressListener.class.getName());
     MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
     isServerStarted = true;
 

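Note that the listener property must be set before MetaStoreUtils.startMetaStore, since the embedded server reads METASTORE_EVENT_LISTENERS from its configuration only at startup. For orientation, a minimal listener has roughly the following shape (a hypothetical LoggingListener; the real IpAddressListener lives in the test tree and checks the client address recorded for each call):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;

    // Hypothetical sketch: log each create-database event the metastore fires.
    public class LoggingListener extends MetaStoreEventListener {
      public LoggingListener(Configuration config) {
        super(config);
      }

      @Override
      public void onCreateDatabase(CreateDatabaseEvent dbEvent) throws MetaException {
        System.out.println("onCreateDatabase, success=" + dbEvent.getStatus());
      }
    }
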
Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java Tue Oct 14 19:06:45 2014
@@ -139,7 +139,7 @@ public class TestHiveHistory extends Tes
 
       SessionState.start(ss);
 
-      String cmd = "select a.key from src a";
+      String cmd = "select a.key+1 from src a";
       Driver d = new Driver(conf);
       int ret = d.run(cmd).getResponseCode();
       if (ret != 0) {

Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java Tue Oct 14 19:06:45 2014
@@ -52,7 +52,7 @@ public abstract class FolderPermissionBa
   protected static Path warehouseDir;
   protected static Path baseDfsDir;
 
-  public static final PathFilter hiddenFileFilter = new PathFilter(){
+  protected static final PathFilter hiddenFileFilter = new PathFilter(){
     public boolean accept(Path p){
       String name = p.getName();
       return !name.startsWith("_") && !name.startsWith(".");
@@ -591,7 +591,7 @@ public abstract class FolderPermissionBa
 
   private List<String> listStatus(String locn) throws Exception {
     List<String> results = new ArrayList<String>();
-    FileStatus[] listStatus = fs.listStatus(new Path(locn));
+    FileStatus[] listStatus = fs.listStatus(new Path(locn), hiddenFileFilter);
     for (FileStatus status : listStatus) {
       results.add(status.getPath().toString());
     }

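The listStatus change above applies the class's existing hiddenFileFilter, so scratch entries such as _SUCCESS files or names starting with "." no longer leak into the permission assertions. A self-contained sketch of the filtered-listing idiom (fs and locn stand in for the test's FileSystem and location string):

    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.PathFilter;

    // Accept only names that do not start with "_" or "." -- Hive's usual
    // convention for hidden and temporary files.
    PathFilter hiddenFileFilter = new PathFilter() {
      public boolean accept(Path p) {
        String name = p.getName();
        return !name.startsWith("_") && !name.startsWith(".");
      }
    };

    // listStatus(Path, PathFilter) applies the filter while listing, so
    // hidden entries never reach the result array.
    FileStatus[] visible = fs.listStatus(new Path(locn), hiddenFileFilter);
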
Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java Tue Oct 14 19:06:45 2014
@@ -171,41 +171,36 @@ public class TestAuthorizationPreEventLi
 
     driver.run("create database " + dbName);
     listSize++;
-    Database db = msc.getDatabase(dbName);
-
     Database dbFromEvent = (Database)assertAndExtractSingleObjectFromEvent(listSize, authCalls,
         DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.DB);
+    Database db = msc.getDatabase(dbName);
     validateCreateDb(db,dbFromEvent);
 
     driver.run("use " + dbName);
     driver.run(String.format("create table %s (a string) partitioned by (b string)", tblName));
-    listSize++;
-    Table tbl = msc.getTable(dbName, tblName);
+    listSize = authCalls.size();
 
     Table tblFromEvent = (
         (org.apache.hadoop.hive.ql.metadata.Table)
         assertAndExtractSingleObjectFromEvent(listSize, authCalls,
             DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.TABLE))
             .getTTable();
+    Table tbl = msc.getTable(dbName, tblName);
     validateCreateTable(tbl, tblFromEvent);
 
     driver.run("alter table tmptbl add partition (b='2011')");
-    listSize++;
-    Partition part = msc.getPartition("hive3705", "tmptbl", "b=2011");
+    listSize = authCalls.size();
 
     Partition ptnFromEvent = (
         (org.apache.hadoop.hive.ql.metadata.Partition)
         assertAndExtractSingleObjectFromEvent(listSize, authCalls,
             DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.PARTITION))
             .getTPartition();
+    Partition part = msc.getPartition("hive3705", "tmptbl", "b=2011");
     validateAddPartition(part,ptnFromEvent);
 
     driver.run(String.format("alter table %s touch partition (%s)", tblName, "b='2011'"));
-    listSize++;
-
-    //the partition did not change,
-    // so the new partition should be similar to the original partition
-    Partition modifiedP = msc.getPartition(dbName, tblName, "b=2011");
+    listSize = authCalls.size();
 
     Partition ptnFromEventAfterAlter = (
         (org.apache.hadoop.hive.ql.metadata.Partition)
@@ -213,6 +208,9 @@ public class TestAuthorizationPreEventLi
             DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.PARTITION))
             .getTPartition();
 
+    //the partition did not change,
+    // so the new partition should be similar to the original partition
+    Partition modifiedP = msc.getPartition(dbName, tblName, "b=2011");
     validateAlterPartition(part, modifiedP, ptnFromEventAfterAlter.getDbName(),
         ptnFromEventAfterAlter.getTableName(), ptnFromEventAfterAlter.getValues(),
         ptnFromEventAfterAlter);
@@ -220,8 +218,9 @@ public class TestAuthorizationPreEventLi
 
     List<String> part_vals = new ArrayList<String>();
     part_vals.add("c=2012");
-    Partition newPart = msc.appendPartition(dbName, tblName, part_vals);
 
+    listSize = authCalls.size();
+    Partition newPart = msc.appendPartition(dbName, tblName, part_vals);
     listSize++;
 
     Partition newPtnFromEvent = (
@@ -233,25 +232,23 @@ public class TestAuthorizationPreEventLi
 
 
     driver.run(String.format("alter table %s rename to %s", tblName, renamed));
-    listSize++;
+    listSize = authCalls.size();
 
-    Table renamedTable = msc.getTable(dbName, renamed);
     Table renamedTableFromEvent = (
         (org.apache.hadoop.hive.ql.metadata.Table)
         assertAndExtractSingleObjectFromEvent(listSize, authCalls,
             DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.TABLE))
             .getTTable();
 
+    Table renamedTable = msc.getTable(dbName, renamed);
     validateAlterTable(tbl, renamedTable, renamedTableFromEvent,
         renamedTable);
     assertFalse(tbl.getTableName().equals(renamedTable.getTableName()));
 
     //change the table name back
     driver.run(String.format("alter table %s rename to %s", renamed, tblName));
-    listSize++;
-
     driver.run(String.format("alter table %s drop partition (b='2011')", tblName));
-    listSize++;
+    listSize = authCalls.size();
 
     Partition ptnFromDropPartition = (
         (org.apache.hadoop.hive.ql.metadata.Partition)
@@ -262,7 +259,7 @@ public class TestAuthorizationPreEventLi
     validateDropPartition(modifiedP, ptnFromDropPartition);
 
     driver.run("drop table " + tblName);
-    listSize++;
+    listSize = authCalls.size();
     Table tableFromDropTableEvent = (
         (org.apache.hadoop.hive.ql.metadata.Table)
         assertAndExtractSingleObjectFromEvent(listSize, authCalls,
@@ -290,16 +287,16 @@ public class TestAuthorizationPreEventLi
     }
 
     tCustom.setTableName(tbl.getTableName() + "_custom");
+    listSize = authCalls.size();
     msc.createTable(tCustom);
     listSize++;
 
-    Table customCreatedTable = msc.getTable(tCustom.getDbName(), tCustom.getTableName());
     Table customCreatedTableFromEvent = (
         (org.apache.hadoop.hive.ql.metadata.Table)
             assertAndExtractSingleObjectFromEvent(listSize, authCalls,
                 DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.TABLE))
         .getTTable();
-
+    Table customCreatedTable = msc.getTable(tCustom.getDbName(), tCustom.getTableName());
     validateCreateTable(tCustom,customCreatedTable);
     validateCreateTable(tCustom,customCreatedTableFromEvent);
 
@@ -316,8 +313,10 @@ public class TestAuthorizationPreEventLi
     assertEquals(tCustom.getSd().getSerdeInfo().getSerializationLib(),
         customCreatedTableFromEvent.getSd().getSerdeInfo().getSerializationLib());
 
-    msc.dropTable(tCustom.getDbName(),tCustom.getTableName());
-    listSize++;
+    listSize = authCalls.size();
+    msc.dropTable(tCustom.getDbName(), tCustom.getTableName());
+    listSize += 2;
+
     Table table2FromDropTableEvent = (
         (org.apache.hadoop.hive.ql.metadata.Table)
             assertAndExtractSingleObjectFromEvent(listSize, authCalls,
@@ -327,7 +326,7 @@ public class TestAuthorizationPreEventLi
     validateDropTable(tCustom, table2FromDropTableEvent);
 
     driver.run("drop database " + dbName);
-    listSize++;
+    listSize = authCalls.size();
     Database dbFromDropDatabaseEvent =
         (Database)assertAndExtractSingleObjectFromEvent(listSize, authCalls,
         DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.DB);

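TestAuthorizationPreEventListener gets the same treatment as the event-listener test: because metadata reads now pass through the authorization provider, the number of auth calls between two statements is no longer fixed, so the test re-syncs listSize from authCalls.size() and fetches objects via msc only after extracting the event. The drop-table hunk above reduces to this pattern:

    driver.run("drop table " + tblName);
    // Reads elsewhere may have shifted absolute positions in authCalls, so
    // re-sync the expected index from the recorded calls themselves.
    listSize = authCalls.size();
    Table tableFromDropTableEvent = (
        (org.apache.hadoop.hive.ql.metadata.Table)
            assertAndExtractSingleObjectFromEvent(listSize, authCalls,
                DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.TABLE))
        .getTTable();
    validateDropTable(tbl, tableFromDropTableEvent);
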
Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java Tue Oct 14 19:06:45 2014
@@ -89,6 +89,7 @@ public class TestMetastoreAuthorizationP
         AuthorizationPreEventListener.class.getName());
     System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname,
         getAuthorizationProvider());
+    setupMetaStoreReadAuthorization();
     System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname,
         InjectableDummyAuthenticator.class.getName());
     System.setProperty(HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS.varname, "");
@@ -115,6 +116,13 @@ public class TestMetastoreAuthorizationP
     driver = new Driver(clientHiveConf);
   }
 
+  protected void setupMetaStoreReadAuthorization() {
+    // Read authorization does not work with the default/legacy authorization mode:
+    // granting a select privilege on a database is a chicken-and-egg problem, since
+    // the grant statement itself invokes get_database, which requires that very privilege.
+    System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_AUTH_READS.varname, "false");
+  }
+
   @Override
   protected void tearDown() throws Exception {
     super.tearDown();

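setupMetaStoreReadAuthorization() is a template-method hook: this base test pins read authorization off, which is the only safe setting for the legacy provider, and a subclass whose provider can authorize reads overrides it before the metastore starts. The storage-based subclass at the end of this commit does exactly that:

    @Override
    protected void setupMetaStoreReadAuthorization() {
      // enable read authorization in metastore
      System.setProperty(
          HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_AUTH_READS.varname, "true");
    }
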
Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java Tue Oct 14 19:06:45 2014
@@ -88,6 +88,7 @@ public class TestMultiAuthorizationPreEv
 
     // verify that the actual action also went through
     Database db = msc.getDatabase(dbName);
+    listSize += 2; // one read-database auth call for each authorization provider
     Database dbFromEvent = (Database)assertAndExtractSingleObjectFromEvent(listSize, authCalls,
         DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.DB);
     validateCreateDb(db,dbFromEvent);

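The listSize += 2 bump reflects that this test wires up two authorization providers, and the msc.getDatabase read above is now authorized once per provider. Presumably the providers are registered as a comma-separated list, along these lines (a hedged sketch; the actual registration happens in this test's setUp, which is not part of the diff):

    // Assumed setup: two providers registered together, so every metastore
    // request is authorized twice and each read appends two auth calls.
    System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname,
        DummyHiveMetastoreAuthorizationProvider.class.getName() + ","
            + DummyHiveMetastoreAuthorizationProvider.class.getName());
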
Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java Tue Oct 14 19:06:45 2014
@@ -18,88 +18,19 @@
 
 package org.apache.hadoop.hive.ql.security;
 
-import java.net.URI;
-import java.util.ArrayList;
-import java.util.List;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hive.cli.CliSessionState;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
-import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener;
-import org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.Assert;
+import org.junit.Test;
 
 /**
  * Test cases focusing on drop table permission checks
  */
-public class TestStorageBasedMetastoreAuthorizationDrops extends TestCase{
-  protected HiveConf clientHiveConf;
-  protected HiveMetaStoreClient msc;
-  protected Driver driver;
-  protected UserGroupInformation ugi;
-  private static int objNum = 0;
-
-  protected String getAuthorizationProvider(){
-    return StorageBasedAuthorizationProvider.class.getName();
-  }
-
-  protected HiveConf createHiveConf() throws Exception {
-    return new HiveConf(this.getClass());
-  }
-
-  @Override
-  protected void setUp() throws Exception {
-
-    super.setUp();
-
-    int port = MetaStoreUtils.findFreePort();
-
-    // Turn on metastore-side authorization
-    System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname,
-        AuthorizationPreEventListener.class.getName());
-    System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname,
-        getAuthorizationProvider());
-    System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname,
-        InjectableDummyAuthenticator.class.getName());
-
-    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
-
-    clientHiveConf = createHiveConf();
-
-    // Turn off client-side authorization
-    clientHiveConf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED,false);
-
-    clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
-    clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
-    clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
-
-    clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
-    clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-
-    ugi = ShimLoader.getHadoopShims().getUGIForConf(clientHiveConf);
-
-    SessionState.start(new CliSessionState(clientHiveConf));
-    msc = new HiveMetaStoreClient(clientHiveConf, null);
-    driver = new Driver(clientHiveConf);
-
-    setupFakeUser();
-    InjectableDummyAuthenticator.injectMode(false);
-  }
-
+public class TestStorageBasedMetastoreAuthorizationDrops extends StorageBasedMetastoreTestBase {
 
+  @Test
   public void testDropDatabase() throws Exception {
     dropDatabaseByOtherUser("-rwxrwxrwx", 0);
     dropDatabaseByOtherUser("-rwxrwxrwt", 1);
@@ -111,12 +42,12 @@ public class TestStorageBasedMetastoreAu
    * @param expectedRet - expected return code for drop by other user
    * @throws Exception
    */
-  private void dropDatabaseByOtherUser(String perm, int expectedRet) throws Exception {
+  public void dropDatabaseByOtherUser(String perm, int expectedRet) throws Exception {
     String dbName = getTestDbName();
     setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), perm);
 
     CommandProcessorResponse resp = driver.run("create database " + dbName);
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
     Database db = msc.getDatabase(dbName);
     validateCreateDb(db, dbName);
 
@@ -124,10 +55,11 @@ public class TestStorageBasedMetastoreAu
 
 
     resp = driver.run("drop database " + dbName);
-    assertEquals(expectedRet, resp.getResponseCode());
+    Assert.assertEquals(expectedRet, resp.getResponseCode());
 
   }
 
+  @Test
   public void testDropTable() throws Exception {
     dropTableByOtherUser("-rwxrwxrwx", 0);
     dropTableByOtherUser("-rwxrwxrwt", 1);
@@ -138,13 +70,13 @@ public class TestStorageBasedMetastoreAu
    * @param expectedRet expected return code on drop table
    * @throws Exception
    */
-  private void dropTableByOtherUser(String perm, int expectedRet) throws Exception {
+  public void dropTableByOtherUser(String perm, int expectedRet) throws Exception {
     String dbName = getTestDbName();
     String tblName = getTestTableName();
     setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");
 
     CommandProcessorResponse resp = driver.run("create database " + dbName);
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
     Database db = msc.getDatabase(dbName);
     validateCreateDb(db, dbName);
 
@@ -152,18 +84,19 @@ public class TestStorageBasedMetastoreAu
 
     String dbDotTable = dbName + "." + tblName;
     resp = driver.run("create table " + dbDotTable + "(i int)");
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
 
 
     InjectableDummyAuthenticator.injectMode(true);
     resp = driver.run("drop table " + dbDotTable);
-    assertEquals(expectedRet, resp.getResponseCode());
+    Assert.assertEquals(expectedRet, resp.getResponseCode());
   }
 
   /**
    * Drop view should not be blocked by SBA. View will not have any location to drop.
    * @throws Exception
    */
+  @Test
   public void testDropView() throws Exception {
     String dbName = getTestDbName();
     String tblName = getTestTableName();
@@ -171,7 +104,7 @@ public class TestStorageBasedMetastoreAu
     setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");
 
     CommandProcessorResponse resp = driver.run("create database " + dbName);
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
     Database db = msc.getDatabase(dbName);
     validateCreateDb(db, dbName);
 
@@ -179,20 +112,20 @@ public class TestStorageBasedMetastoreAu
 
     String dbDotTable = dbName + "." + tblName;
     resp = driver.run("create table " + dbDotTable + "(i int)");
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
 
     String dbDotView = dbName + "." + viewName;
     resp = driver.run("create view " + dbDotView + " as select * from " + dbDotTable);
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
 
     resp = driver.run("drop view " + dbDotView);
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
 
     resp = driver.run("drop table " + dbDotTable);
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
   }
 
-
+  @Test
   public void testDropPartition() throws Exception {
     dropPartitionByOtherUser("-rwxrwxrwx", 0);
     dropPartitionByOtherUser("-rwxrwxrwt", 1);
@@ -203,70 +136,29 @@ public class TestStorageBasedMetastoreAu
    * @param expectedRet expected return code
    * @throws Exception
    */
-  private void dropPartitionByOtherUser(String perm, int expectedRet) throws Exception {
+  public void dropPartitionByOtherUser(String perm, int expectedRet) throws Exception {
     String dbName = getTestDbName();
     String tblName = getTestTableName();
     setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");
 
     CommandProcessorResponse resp = driver.run("create database " + dbName);
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
     Database db = msc.getDatabase(dbName);
     validateCreateDb(db, dbName);
     setPermissions(db.getLocationUri(), "-rwxrwxrwx");
 
     String dbDotTable = dbName + "." + tblName;
     resp = driver.run("create table " + dbDotTable + "(i int) partitioned by (b string)");
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
     Table tab = msc.getTable(dbName, tblName);
     setPermissions(tab.getSd().getLocation(), perm);
 
     resp = driver.run("alter table " + dbDotTable + " add partition (b='2011')");
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
 
     InjectableDummyAuthenticator.injectMode(true);
     resp = driver.run("alter table " + dbDotTable + " drop partition (b='2011')");
-    assertEquals(expectedRet, resp.getResponseCode());
+    Assert.assertEquals(expectedRet, resp.getResponseCode());
   }
 
-  private void setupFakeUser() {
-    String fakeUser = "mal";
-    List<String> fakeGroupNames = new ArrayList<String>();
-    fakeGroupNames.add("groupygroup");
-
-    InjectableDummyAuthenticator.injectUserName(fakeUser);
-    InjectableDummyAuthenticator.injectGroupNames(fakeGroupNames);
-  }
-
-  private String setupUser() {
-    return ugi.getUserName();
-  }
-
-  private String getTestTableName() {
-    return this.getClass().getSimpleName() + "tab" + ++objNum;
-  }
-
-  private String getTestDbName() {
-    return this.getClass().getSimpleName() + "db" + ++objNum;
-  }
-
-  @Override
-  protected void tearDown() throws Exception {
-    super.tearDown();
-    InjectableDummyAuthenticator.injectMode(false);
-  }
-
-  protected void setPermissions(String locn, String permissions) throws Exception {
-    FileSystem fs = FileSystem.get(new URI(locn), clientHiveConf);
-    fs.setPermission(new Path(locn), FsPermission.valueOf(permissions));
-  }
-
-  private void validateCreateDb(Database expectedDb, String dbName) {
-    assertEquals(expectedDb.getName().toLowerCase(), dbName.toLowerCase());
-  }
-
-  private void validateCreateTable(Table expectedTable, String tblName, String dbName) {
-    assertNotNull(expectedTable);
-    assertEquals(expectedTable.getTableName().toLowerCase(),tblName.toLowerCase());
-    assertEquals(expectedTable.getDbName().toLowerCase(),dbName.toLowerCase());
-  }
 }

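All of the setUp/tearDown plumbing deleted above moves into the new StorageBasedMetastoreTestBase parent, whose contents are not part of this hunk. Reconstructed from the removed members, the base class presumably looks roughly like this (a hypothetical outline; the @Before method that starts the metastore with storage-based authorization is elided):

    import java.net.URI;

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.Database;
    import org.apache.hadoop.hive.ql.Driver;
    import org.junit.After;
    import org.junit.Assert;

    public abstract class StorageBasedMetastoreTestBase {
      protected HiveConf clientHiveConf;
      protected HiveMetaStoreClient msc;
      protected Driver driver;
      private static int objNum = 0;

      protected String getTestDbName() {
        return getClass().getSimpleName() + "db" + ++objNum;
      }

      protected String getTestTableName() {
        return getClass().getSimpleName() + "tab" + ++objNum;
      }

      protected void setPermissions(String locn, String permissions) throws Exception {
        FileSystem fs = FileSystem.get(new URI(locn), clientHiveConf);
        fs.setPermission(new Path(locn), FsPermission.valueOf(permissions));
      }

      protected void validateCreateDb(Database expectedDb, String dbName) {
        Assert.assertEquals(expectedDb.getName().toLowerCase(), dbName.toLowerCase());
      }

      @After
      public void tearDown() throws Exception {
        // Reset the injected fake-user mode so later tests see the real UGI.
        InjectableDummyAuthenticator.injectMode(false);
      }
    }
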
Modified: hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java (original)
+++ hive/branches/llap/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java Tue Oct 14 19:06:45 2014
@@ -23,6 +23,7 @@ import java.net.URI;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider;
 
@@ -102,4 +103,10 @@ public class TestStorageBasedMetastoreAu
     return super.getTestTableName() + "_SBAP";
   }
 
+  @Override
+  protected void setupMetaStoreReadAuthorization() {
+    // enable read authorization in metastore
+    System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_AUTH_READS.varname, "true");
+  }
+
 }

