Author: gates
Date: Wed Mar 18 17:41:50 2015
New Revision: 1667596

URL: http://svn.apache.org/r1667596
Log:
HIVE-10008 Need to refactor itests for hbase metastore (Alan Gates)

Added:
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/IMockUtils.java
Modified:
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java

Added: hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/IMockUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/IMockUtils.java?rev=1667596&view=auto
==============================================================================
--- hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/IMockUtils.java (added)
+++ hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/IMockUtils.java Wed Mar 18 17:41:50 2015
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hive.metastore.hbase;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Base class that sets up an HBase mini-cluster and a mocked HBase connection
+ * for the HBaseStore integration tests.
+ */
+public class IMockUtils {
+
+  private static final Log LOG = LogFactory.getLog(IMockUtils.class.getName());
+
+  protected static HBaseTestingUtility utility;
+  protected static HTableInterface tblTable;
+  protected static HTableInterface sdTable;
+  protected static HTableInterface partTable;
+  protected static HTableInterface dbTable;
+  protected static HTableInterface funcTable;
+  protected static HTableInterface roleTable;
+  protected static HTableInterface globalPrivsTable;
+  protected static HTableInterface principalRoleMapTable;
+  protected static Map<String, String> emptyParameters = new HashMap<String, String>();
+
+  @Mock
+  private HBaseConnection hconn;
+  protected HBaseStore store;
+  protected HiveConf conf;
+  protected Driver driver;
+
+  protected static void startMiniCluster() throws Exception {
+    utility = new HBaseTestingUtility();
+    utility.startMiniCluster();
+    byte[][] families = new byte[][]{HBaseReadWrite.CATALOG_CF, HBaseReadWrite.STATS_CF};
+    tblTable = utility.createTable(HBaseReadWrite.TABLE_TABLE.getBytes(HBaseUtils.ENCODING),
+        families);
+    sdTable = utility.createTable(HBaseReadWrite.SD_TABLE.getBytes(HBaseUtils.ENCODING),
+        HBaseReadWrite.CATALOG_CF);
+    partTable = utility.createTable(HBaseReadWrite.PART_TABLE.getBytes(HBaseUtils.ENCODING),
+        families);
+    dbTable = utility.createTable(HBaseReadWrite.DB_TABLE.getBytes(HBaseUtils.ENCODING),
+        HBaseReadWrite.CATALOG_CF);
+    funcTable = utility.createTable(HBaseReadWrite.FUNC_TABLE.getBytes(HBaseUtils.ENCODING),
+        HBaseReadWrite.CATALOG_CF);
+    roleTable = utility.createTable(HBaseReadWrite.ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
+        HBaseReadWrite.CATALOG_CF);
+    globalPrivsTable =
+        utility.createTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE.getBytes(HBaseUtils.ENCODING),
+            HBaseReadWrite.CATALOG_CF);
+    principalRoleMapTable =
+        utility.createTable(HBaseReadWrite.USER_TO_ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
+            HBaseReadWrite.CATALOG_CF);
+  }
+
+  protected static void shutdownMiniCluster() throws Exception {
+    utility.shutdownMiniCluster();
+  }
+
+  protected void setupConnection() throws IOException {
+    MockitoAnnotations.initMocks(this);
+    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.SD_TABLE)).thenReturn(sdTable);
+    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.TABLE_TABLE)).thenReturn(tblTable);
+    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.PART_TABLE)).thenReturn(partTable);
+    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.DB_TABLE)).thenReturn(dbTable);
+    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.FUNC_TABLE)).thenReturn(funcTable);
+    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.ROLE_TABLE)).thenReturn(roleTable);
+    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE)).thenReturn(
+        globalPrivsTable);
+    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.USER_TO_ROLE_TABLE)).thenReturn(
+        principalRoleMapTable);
+    conf = new HiveConf();
+  }
+
+  protected void setupDriver() {
+    conf.setVar(HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS, HBaseReadWrite.TEST_CONN);
+    conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
+    conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
+        "org.apache.hadoop.hive.metastore.hbase.HBaseStore");
+    conf.setBoolVar(HiveConf.ConfVars.METASTORE_FASTPATH, true);
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+    HBaseReadWrite.setTestConnection(hconn);
+
+    SessionState.start(new CliSessionState(conf));
+    driver = new Driver(conf);
+  }
+
+  protected void setupHBaseStore() {
+    // Turn off caching, as we want to test actual interaction with HBase
+    conf.setBoolean(HBaseReadWrite.NO_CACHE_CONF, true);
+    conf.setVar(HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS, HBaseReadWrite.TEST_CONN);
+    HBaseReadWrite.setTestConnection(hconn);
+    // HBaseReadWrite hbase = HBaseReadWrite.getInstance(conf);
+    store = new HBaseStore();
+    store.setConf(conf);
+  }
+
+}
+
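
With IMockUtils in place, the four modified tests below become thin wrappers around its setup methods. As an illustration only (ExampleHBaseTest and its test body are hypothetical and not part of this commit), a new integration test would follow this pattern:

package org.apache.hadoop.hive.metastore.hbase;

import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import java.io.IOException;

// Hypothetical example, not part of this commit: a new integration test
// built on the IMockUtils base class added above.
public class ExampleHBaseTest extends IMockUtils {

  @BeforeClass
  public static void startup() throws Exception {
    // Bring up the shared HBase mini-cluster once per test class.
    IMockUtils.startMiniCluster();
  }

  @AfterClass
  public static void shutdown() throws Exception {
    IMockUtils.shutdownMiniCluster();
  }

  @Before
  public void setup() throws IOException {
    // Wire the mocked HBaseConnection to the mini-cluster tables and
    // create an HBaseStore against it.  Tests that drive SQL through the
    // Driver (see TestHBaseMetastoreSql) call setupDriver() here instead
    // of setupHBaseStore().
    setupConnection();
    setupHBaseStore();
  }

  @Test
  public void exampleDatabaseRoundTrip() throws Exception {
    // Exercise the metastore through the inherited 'store' field, e.g.
    // store.createDatabase(...) / store.getDatabase(...).
  }
}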

Modified: hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java?rev=1667596&r1=1667595&r2=1667596&view=diff
==============================================================================
--- hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java (original)
+++ hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java Wed Mar 18 17:41:50 2015
@@ -20,10 +20,6 @@ package org.apache.hadoop.hive.metastore
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.ObjectStore;
 import org.apache.hadoop.hive.metastore.RawStore;
 import org.apache.hadoop.hive.metastore.api.Database;
@@ -42,83 +38,34 @@ import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.MockitoAnnotations;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 
 /**
  * Test that import from an RDBMS based metastore works
  */
-public class TestHBaseImport {
+public class TestHBaseImport extends IMockUtils {
 
  private static final Log LOG = LogFactory.getLog(TestHBaseStoreIntegration.class.getName());
 
-  private static HBaseTestingUtility utility;
-  private static HTableInterface tblTable;
-  private static HTableInterface sdTable;
-  private static HTableInterface partTable;
-  private static HTableInterface dbTable;
-  private static HTableInterface funcTable;
-  private static HTableInterface roleTable;
-  private static Map<String, String> emptyParameters = new HashMap<String, String>();
-
-  @Rule public ExpectedException thrown = ExpectedException.none();
-  @Mock private HBaseConnection hconn;
-  private HBaseStore store;
-  private HiveConf conf;
-
   @BeforeClass
-  public static void startMiniCluster() throws Exception {
-    utility = new HBaseTestingUtility();
-    utility.startMiniCluster();
-    byte[][] families = new byte[][] {HBaseReadWrite.CATALOG_CF, HBaseReadWrite.STATS_CF};
-    tblTable = utility.createTable(HBaseReadWrite.TABLE_TABLE.getBytes(HBaseUtils.ENCODING),
-        families);
-    sdTable = utility.createTable(HBaseReadWrite.SD_TABLE.getBytes(HBaseUtils.ENCODING),
-        HBaseReadWrite.CATALOG_CF);
-    partTable = utility.createTable(HBaseReadWrite.PART_TABLE.getBytes(HBaseUtils.ENCODING),
-        families);
-    dbTable = utility.createTable(HBaseReadWrite.DB_TABLE.getBytes(HBaseUtils.ENCODING),
-        HBaseReadWrite.CATALOG_CF);
-    funcTable = utility.createTable(HBaseReadWrite.FUNC_TABLE.getBytes(HBaseUtils.ENCODING),
-                                  HBaseReadWrite.CATALOG_CF);
-    roleTable = utility.createTable(HBaseReadWrite.ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
-        HBaseReadWrite.CATALOG_CF);
+  public static void startup() throws Exception {
+    IMockUtils.startMiniCluster();
   }
 
   @AfterClass
-  public static void shutdownMiniCluster() throws Exception {
-    utility.shutdownMiniCluster();
+  public static void shutdown() throws Exception {
+    IMockUtils.shutdownMiniCluster();
   }
 
   @Before
-  public void setupConnection() throws IOException {
-    MockitoAnnotations.initMocks(this);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.SD_TABLE)).thenReturn(sdTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.TABLE_TABLE)).thenReturn(tblTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.PART_TABLE)).thenReturn(partTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.DB_TABLE)).thenReturn(dbTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.FUNC_TABLE)).thenReturn(funcTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.ROLE_TABLE)).thenReturn(roleTable);
-    conf = new HiveConf();
-    // Turn off caching, as we want to test actual interaction with HBase
-    conf.setBoolean(HBaseReadWrite.NO_CACHE_CONF, true);
-    conf.setVar(HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS, HBaseReadWrite.TEST_CONN);
-    HBaseReadWrite.setTestConnection(hconn);
-    /*HBaseReadWrite hbase = HBaseReadWrite.getInstance(conf);
-    hbase.setConnection(hconn);*/
-    store = new HBaseStore();
-    store.setConf(conf);
+  public void setup() throws IOException {
+    setupConnection();
+    setupHBaseStore();
   }
 
   @Test

Modified: hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java?rev=1667596&r1=1667595&r2=1667596&view=diff
==============================================================================
--- hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java (original)
+++ hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java Wed Mar 18 17:41:50 2015
@@ -20,111 +20,37 @@ package org.apache.hadoop.hive.metastore
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hive.cli.CliSessionState;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
-import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.MockitoAnnotations;
 import java.io.IOException;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 
 /**
- * Integration tests with HBase Mini-cluster for HBaseStore
+ * Integration tests with HBase Mini-cluster using actual SQL
  */
-public class TestHBaseMetastoreSql {
+public class TestHBaseMetastoreSql extends IMockUtils {
 
  private static final Log LOG = LogFactory.getLog(TestHBaseStoreIntegration.class.getName());
 
-  private static HBaseTestingUtility utility;
-  private static HTableInterface tblTable;
-  private static HTableInterface sdTable;
-  private static HTableInterface partTable;
-  private static HTableInterface dbTable;
-  private static HTableInterface roleTable;
-  private static HTableInterface globalPrivsTable;
-  private static HTableInterface principalRoleMapTable;
-  private static Map<String, String> emptyParameters = new HashMap<String, String>();
-
-  @Rule public ExpectedException thrown = ExpectedException.none();
-  @Mock private HBaseConnection hconn;
-  private HBaseStore store;
-  private HiveConf conf;
-  private Driver driver;
-
   @BeforeClass
-  public static void startMiniCluster() throws Exception {
-    utility = new HBaseTestingUtility();
-    utility.startMiniCluster();
-    byte[][] families = new byte[][] {HBaseReadWrite.CATALOG_CF, HBaseReadWrite.STATS_CF};
-    tblTable = utility.createTable(HBaseReadWrite.TABLE_TABLE.getBytes(HBaseUtils.ENCODING),
-        families);
-    sdTable = utility.createTable(HBaseReadWrite.SD_TABLE.getBytes(HBaseUtils.ENCODING),
-        HBaseReadWrite.CATALOG_CF);
-    partTable = utility.createTable(HBaseReadWrite.PART_TABLE.getBytes(HBaseUtils.ENCODING),
-        families);
-    dbTable = utility.createTable(HBaseReadWrite.DB_TABLE.getBytes(HBaseUtils.ENCODING),
-        HBaseReadWrite.CATALOG_CF);
-    roleTable = utility.createTable(HBaseReadWrite.ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
-        HBaseReadWrite.CATALOG_CF);
-    globalPrivsTable =
-        utility.createTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE.getBytes(HBaseUtils.ENCODING),
-            HBaseReadWrite.CATALOG_CF);
-    principalRoleMapTable =
-        utility.createTable(HBaseReadWrite.USER_TO_ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
-            HBaseReadWrite.CATALOG_CF);
+  public static void startup() throws Exception {
+    IMockUtils.startMiniCluster();
+
   }
 
   @AfterClass
-  public static void shutdownMiniCluster() throws Exception {
-    utility.shutdownMiniCluster();
+  public static void shutdown() throws Exception {
+    IMockUtils.shutdownMiniCluster();
   }
 
   @Before
-  public void setupConnection() throws IOException {
-    MockitoAnnotations.initMocks(this);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.SD_TABLE)).thenReturn(sdTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.TABLE_TABLE)).thenReturn(tblTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.PART_TABLE)).thenReturn(partTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.DB_TABLE)).thenReturn(dbTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.ROLE_TABLE)).thenReturn(roleTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE)).thenReturn(globalPrivsTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.USER_TO_ROLE_TABLE)).thenReturn(principalRoleMapTable);
-    conf = new HiveConf();
-    conf.setVar(HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS, HBaseReadWrite.TEST_CONN);
-    conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
-    conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
-        "org.apache.hadoop.hive.metastore.hbase.HBaseStore");
-    conf.setBoolVar(HiveConf.ConfVars.METASTORE_FASTPATH, true);
-    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
-    HBaseReadWrite.setTestConnection(hconn);
-
-    SessionState.start(new CliSessionState(conf));
-    driver = new Driver(conf);
+  public void before() throws IOException {
+    setupConnection();
+    setupDriver();
   }
 
   @Test
@@ -143,5 +69,57 @@ public class TestHBaseMetastoreSql {
     Assert.assertEquals(0, rsp.getResponseCode());
   }
 
+  @Test
+  public void database() throws Exception {
+    CommandProcessorResponse rsp = driver.run("create database db");
+    Assert.assertEquals(0, rsp.getResponseCode());
+    rsp = driver.run("alter database db set owner user me");
+    Assert.assertEquals(0, rsp.getResponseCode());
+    rsp = driver.run("drop database db");
+    Assert.assertEquals(0, rsp.getResponseCode());
+  }
+
+  @Ignore
+  public void table() throws Exception {
+    driver.run("create table tbl (c int)");
+    CommandProcessorResponse rsp = driver.run("insert into table tbl values (3)");
+    Assert.assertEquals(0, rsp.getResponseCode());
+    rsp = driver.run("select * from tbl");
+    Assert.assertEquals(0, rsp.getResponseCode());
+    rsp = driver.run("alter table tbl set tblproperties ('example', 'true')");
+    Assert.assertEquals(0, rsp.getResponseCode());
+    rsp = driver.run("drop table tbl");
+    Assert.assertEquals(0, rsp.getResponseCode());
+  }
+
+  @Ignore
+  public void partitionedTable() throws Exception {
+    driver.run("create table parttbl (c int) partitioned by (ds string)");
+    CommandProcessorResponse rsp =
+        driver.run("insert into table parttbl partition(ds) values (1, 'today'), (2, 'yesterday')" +
+            ", (3, 'tomorrow')");
+    Assert.assertEquals(0, rsp.getResponseCode());
+    // Do it again, to check insert into existing partitions
+    rsp = driver.run("insert into table parttbl partition(ds) values (4, 'today'), (5, 'yesterday')"
+        + ", (6, 'tomorrow')");
+    Assert.assertEquals(0, rsp.getResponseCode());
+    rsp = driver.run("insert into table parttbl partition(ds = 'someday') values (1)");
+    Assert.assertEquals(0, rsp.getResponseCode());
+    rsp = driver.run("insert into table parttbl partition(ds = 'someday') values (2)");
+    Assert.assertEquals(0, rsp.getResponseCode());
+    rsp = driver.run("alter table parttbl add partition (ds = 'whenever')");
+    Assert.assertEquals(0, rsp.getResponseCode());
+    rsp = driver.run("insert into table parttbl partition(ds = 'whenever') values (2)");
+    Assert.assertEquals(0, rsp.getResponseCode());
+    rsp = driver.run("alter table parttbl touch partition (ds = 'whenever')");
+    Assert.assertEquals(0, rsp.getResponseCode());
+    rsp = driver.run("alter table parttbl drop partition (ds = 'whenever')");
+    Assert.assertEquals(0, rsp.getResponseCode());
+    rsp = driver.run("select * from parttbl");
+    Assert.assertEquals(0, rsp.getResponseCode());
+    rsp = driver.run("select * from parttbl where ds = 'today'");
+    Assert.assertEquals(0, rsp.getResponseCode());
+  }
+
 
 }

Modified: hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java?rev=1667596&r1=1667595&r2=1667596&view=diff
==============================================================================
--- hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java (original)
+++ hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java Wed Mar 18 17:41:50 2015
@@ -20,10 +20,6 @@ package org.apache.hadoop.hive.metastore
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
@@ -58,7 +54,6 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.model.MRoleMap;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
@@ -67,89 +62,35 @@ import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.MockitoAnnotations;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 
 /**
  * Integration tests with HBase Mini-cluster for HBaseStore
  */
-public class TestHBaseStoreIntegration {
+public class TestHBaseStoreIntegration extends IMockUtils {
 
  private static final Log LOG = LogFactory.getLog(TestHBaseStoreIntegration.class.getName());
 
-  private static HBaseTestingUtility utility;
-  private static HTableInterface tblTable;
-  private static HTableInterface sdTable;
-  private static HTableInterface partTable;
-  private static HTableInterface dbTable;
-  private static HTableInterface funcTable;
-  private static HTableInterface roleTable;
-  private static HTableInterface globalPrivsTable;
-  private static HTableInterface principalRoleMapTable;
-  private static Map<String, String> emptyParameters = new HashMap<String, String>();
-
   @Rule public ExpectedException thrown = ExpectedException.none();
-  @Mock private HBaseConnection hconn;
-  private HBaseStore store;
-  private HiveConf conf;
 
   @BeforeClass
-  public static void startMiniCluster() throws Exception {
-    utility = new HBaseTestingUtility();
-    utility.startMiniCluster();
-    byte[][] families = new byte[][] {HBaseReadWrite.CATALOG_CF, HBaseReadWrite.STATS_CF};
-    tblTable = utility.createTable(HBaseReadWrite.TABLE_TABLE.getBytes(HBaseUtils.ENCODING),
-        families);
-    sdTable = utility.createTable(HBaseReadWrite.SD_TABLE.getBytes(HBaseUtils.ENCODING),
-        HBaseReadWrite.CATALOG_CF);
-    partTable = utility.createTable(HBaseReadWrite.PART_TABLE.getBytes(HBaseUtils.ENCODING),
-        families);
-    dbTable = utility.createTable(HBaseReadWrite.DB_TABLE.getBytes(HBaseUtils.ENCODING),
-        HBaseReadWrite.CATALOG_CF);
-    funcTable = utility.createTable(HBaseReadWrite.FUNC_TABLE.getBytes(HBaseUtils.ENCODING),
-                                  HBaseReadWrite.CATALOG_CF);
-    roleTable = utility.createTable(HBaseReadWrite.ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
-        HBaseReadWrite.CATALOG_CF);
-    globalPrivsTable =
-        utility.createTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE.getBytes(HBaseUtils.ENCODING),
-        HBaseReadWrite.CATALOG_CF);
-    principalRoleMapTable =
-        utility.createTable(HBaseReadWrite.USER_TO_ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
-        HBaseReadWrite.CATALOG_CF);
+  public static void startup() throws Exception {
+    IMockUtils.startMiniCluster();
   }
 
   @AfterClass
-  public static void shutdownMiniCluster() throws Exception {
-    utility.shutdownMiniCluster();
+  public static void shutdown() throws Exception {
+    IMockUtils.shutdownMiniCluster();
   }
 
   @Before
-  public void setupConnection() throws IOException {
-    MockitoAnnotations.initMocks(this);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.SD_TABLE)).thenReturn(sdTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.TABLE_TABLE)).thenReturn(tblTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.PART_TABLE)).thenReturn(partTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.DB_TABLE)).thenReturn(dbTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.FUNC_TABLE)).thenReturn(funcTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.ROLE_TABLE)).thenReturn(roleTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE)).thenReturn(globalPrivsTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.USER_TO_ROLE_TABLE)).thenReturn(principalRoleMapTable);
-    conf = new HiveConf();
-    // Turn off caching, as we want to test actual interaction with HBase
-    conf.setBoolean(HBaseReadWrite.NO_CACHE_CONF, true);
-    conf.setVar(HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS, HBaseReadWrite.TEST_CONN);
-    HBaseReadWrite.setTestConnection(hconn);
-    // HBaseReadWrite hbase = HBaseReadWrite.getInstance(conf);
-    store = new HBaseStore();
-    store.setConf(conf);
+  public void setup() throws IOException {
+    setupConnection();
+    setupHBaseStore();
   }
 
   @Test

Modified: hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java?rev=1667596&r1=1667595&r2=1667596&view=diff
==============================================================================
--- hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java (original)
+++ hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java Wed Mar 18 17:41:50 2015
@@ -20,10 +20,6 @@ package org.apache.hadoop.hive.metastore
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
@@ -33,91 +29,37 @@ import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.MockitoAnnotations;
 import java.io.IOException;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 
 /**
  * Integration tests with HBase Mini-cluster for HBaseStore
  */
-public class TestStorageDescriptorSharing {
+public class TestStorageDescriptorSharing extends IMockUtils {
 
  private static final Log LOG = LogFactory.getLog(TestHBaseStoreIntegration.class.getName());
 
-  private static HBaseTestingUtility utility;
-  private static HTableInterface tblTable;
-  private static HTableInterface sdTable;
-  private static HTableInterface partTable;
-  private static HTableInterface dbTable;
-  private static HTableInterface roleTable;
-  private static HTableInterface globalPrivsTable;
-  private static HTableInterface principalRoleMapTable;
-  private static Map<String, String> emptyParameters = new HashMap<String, String>();
-
-  @Rule public ExpectedException thrown = ExpectedException.none();
-  @Mock private HBaseConnection hconn;
-  private HBaseStore store;
-  private HiveConf conf;
   private MessageDigest md;
 
   @BeforeClass
-  public static void startMiniCluster() throws Exception {
-    utility = new HBaseTestingUtility();
-    utility.startMiniCluster();
-    byte[][] families = new byte[][] {HBaseReadWrite.CATALOG_CF, HBaseReadWrite.STATS_CF};
-    tblTable = utility.createTable(HBaseReadWrite.TABLE_TABLE.getBytes(HBaseUtils.ENCODING),
-        families);
-    sdTable = utility.createTable(HBaseReadWrite.SD_TABLE.getBytes(HBaseUtils.ENCODING),
-        HBaseReadWrite.CATALOG_CF);
-    partTable = utility.createTable(HBaseReadWrite.PART_TABLE.getBytes(HBaseUtils.ENCODING),
-        families);
-    dbTable = utility.createTable(HBaseReadWrite.DB_TABLE.getBytes(HBaseUtils.ENCODING),
-        HBaseReadWrite.CATALOG_CF);
-    roleTable = utility.createTable(HBaseReadWrite.ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
-        HBaseReadWrite.CATALOG_CF);
-    globalPrivsTable =
-        utility.createTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE.getBytes(HBaseUtils.ENCODING),
-            HBaseReadWrite.CATALOG_CF);
-    principalRoleMapTable =
-        utility.createTable(HBaseReadWrite.USER_TO_ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
-            HBaseReadWrite.CATALOG_CF);
+  public static void startup() throws Exception {
+    IMockUtils.startMiniCluster();
   }
 
   @AfterClass
-  public static void shutdownMiniCluster() throws Exception {
-    utility.shutdownMiniCluster();
+  public static void shutdown() throws Exception {
+    IMockUtils.shutdownMiniCluster();
   }
 
   @Before
-  public void setupConnection() throws IOException {
-    MockitoAnnotations.initMocks(this);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.SD_TABLE)).thenReturn(sdTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.TABLE_TABLE)).thenReturn(tblTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.PART_TABLE)).thenReturn(partTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.DB_TABLE)).thenReturn(dbTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.ROLE_TABLE)).thenReturn(roleTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE)).thenReturn(globalPrivsTable);
-    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.USER_TO_ROLE_TABLE)).thenReturn(principalRoleMapTable);
-    conf = new HiveConf();
-    // Turn off caching, as we want to test actual interaction with HBase
-    conf.setBoolean(HBaseReadWrite.NO_CACHE_CONF, true);
-    conf.setVar(HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS, HBaseReadWrite.TEST_CONN);
-    HBaseReadWrite.setTestConnection(hconn);
-    // HBaseReadWrite hbase = HBaseReadWrite.getInstance(conf);
-    store = new HBaseStore();
-    store.setConf(conf);
-
+  public void setup() throws IOException {
+    setupConnection();
+    setupHBaseStore();
     try {
       md = MessageDigest.getInstance("MD5");
     } catch (NoSuchAlgorithmException e) {

