http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
----------------------------------------------------------------------
diff --git 
a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
 
b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
index cf51909..daf01ac 100644
--- 
a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
+++ 
b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
@@ -39,13 +39,12 @@ import org.apache.lens.server.api.query.QueryContext;
 import org.apache.lens.server.api.query.cost.QueryCost;
 import org.apache.lens.server.api.query.priority.CostRangePriorityDecider;
 import org.apache.lens.server.api.query.priority.CostToPriorityRangeConf;
+import org.apache.lens.server.api.user.MockDriverQueryHook;
 import org.apache.lens.server.api.util.LensUtil;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.*;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.HiveDriverRunHook;
@@ -72,7 +71,9 @@ public class TestHiveDriver {
   private final String testOutputDir = "target/" + 
this.getClass().getSimpleName() + "/test-output";
 
   /** The conf. */
-  protected HiveConf conf;
+  protected Configuration driverConf = new Configuration();
+  protected HiveConf hiveConf = new HiveConf();
+  protected Configuration queryConf = new Configuration();
 
   /** The driver. */
   protected HiveDriver driver;
@@ -98,39 +99,37 @@ public class TestHiveDriver {
     // Check if hadoop property set
     System.out.println("###HADOOP_PATH " + 
System.getProperty("hadoop.bin.path"));
     assertNotNull(System.getProperty("hadoop.bin.path"));
-
     createDriver();
-    ss = new SessionState(conf, "testuser");
+    ss = new SessionState(hiveConf, "testuser");
     SessionState.start(ss);
-    Hive client = Hive.get(conf);
+    Hive client = Hive.get(hiveConf);
     Database database = new Database();
     database.setName(dataBase);
     client.createDatabase(database, true);
     SessionState.get().setCurrentDatabase(dataBase);
     sessionid = SessionState.get().getSessionId();
-
-    conf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
-    QueryContext context = createContext("USE " + dataBase, conf);
+    driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
+    QueryContext context = createContext("USE " + dataBase, this.queryConf);
     driver.execute(context);
-    conf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
-    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
+    driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
+    driverConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
true);
   }
 
   protected void createDriver() throws LensException {
-    conf = new HiveConf();
-    conf.addResource("drivers/hive/hive1/hivedriver-site.xml");
-    conf.setClass(HiveDriver.HIVE_CONNECTION_CLASS, 
EmbeddedThriftConnection.class, ThriftConnection.class);
-    conf.set("hive.lock.manager", 
"org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
-    conf.setBoolean(HiveDriver.HS2_CALCULATE_PRIORITY, true);
+    driverConf.addResource("drivers/hive/hive1/hivedriver-site.xml");
+    driverConf.setClass(HiveDriver.HIVE_CONNECTION_CLASS, 
EmbeddedThriftConnection.class, ThriftConnection.class);
+    driverConf.setClass(HiveDriver.HIVE_QUERY_HOOK_CLASS, 
MockDriverQueryHook.class, DriverQueryHook.class);
+    driverConf.set("hive.lock.manager", 
"org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
+    driverConf.setBoolean(HiveDriver.HS2_CALCULATE_PRIORITY, true);
     driver = new HiveDriver();
-    driver.configure(conf, "hive", "hive1");
+    driver.configure(driverConf, "hive", "hive1");
     drivers = Lists.<LensDriver>newArrayList(driver);
     System.out.println("TestHiveDriver created");
   }
 
   @BeforeMethod
   public void setDB() {
-    SessionState.get().setCurrentDatabase(dataBase);
+    SessionState.setCurrentSessionState(ss);
   }
 
   protected QueryContext createContext(final String query, Configuration conf) 
throws LensException {
@@ -167,8 +166,19 @@ public class TestHiveDriver {
    */
   @AfterTest
   public void afterTest() throws Exception {
+    verifyThriftLogs();
     driver.close();
-    Hive.get(conf).dropDatabase(dataBase, true, true, true);
+    Hive.get(hiveConf).dropDatabase(dataBase, true, true, true);
+  }
+
+  private void verifyThriftLogs() throws IOException {
+    BufferedReader br = new BufferedReader(new FileReader(new 
File("target/test.log")));
+    for (String line = br.readLine(); line != null; line = br.readLine()) {
+      if (line.contains("Update from hive")) {
+        return;
+      }
+    }
+    fail("No updates from hive found in the logs");
   }
 
   /**
@@ -182,14 +192,13 @@ public class TestHiveDriver {
     System.out.println("Hadoop Location: " + 
System.getProperty("hadoop.bin.path"));
     String createTable = "CREATE TABLE IF NOT EXISTS " + tableName + "(ID 
STRING)" + " TBLPROPERTIES ('"
       + LensConfConstants.STORAGE_COST + "'='500')";
+    String dataLoad = "LOAD DATA LOCAL INPATH '" + TEST_DATA_FILE + "' 
OVERWRITE INTO TABLE " + tableName;
     // Create test table
-    QueryContext context = createContext(createTable, conf);
+    QueryContext context = createContext(createTable, queryConf);
     LensResultSet resultSet = driver.execute(context);
     assertNull(resultSet);
-
     // Load some data into the table
-    String dataLoad = "LOAD DATA LOCAL INPATH '" + TEST_DATA_FILE + "' 
OVERWRITE INTO TABLE " + tableName;
-    context = createContext(dataLoad, conf);
+    context = createContext(dataLoad, queryConf);
     resultSet = driver.execute(context);
     assertNull(resultSet);
     assertHandleSize(handleSize);
@@ -207,16 +216,16 @@ public class TestHiveDriver {
     String createTable = "CREATE TABLE IF NOT EXISTS " + tableName + "(ID 
STRING)"
       + " PARTITIONED BY (dt string) TBLPROPERTIES ('"
       + LensConfConstants.STORAGE_COST + "'='500')";
-    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
+    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
false);
     // Craete again
-    QueryContext context = createContext(createTable, conf);
+    QueryContext context = createContext(createTable, queryConf);
     LensResultSet resultSet = driver.execute(context);
     assertNull(resultSet);
 
     // Load some data into the table
     String dataLoad = "LOAD DATA LOCAL INPATH '" + TEST_DATA_FILE + "' 
OVERWRITE INTO TABLE " + tableName
       + " partition (dt='today')";
-    context = createContext(dataLoad, conf);
+    context = createContext(dataLoad, queryConf);
     resultSet = driver.execute(context);
     assertNull(resultSet);
     assertHandleSize(handleSize);
@@ -232,9 +241,9 @@ public class TestHiveDriver {
   @Test
   public void testInsertOverwriteConf() throws Exception {
     createTestTable("test_insert_overwrite");
-    conf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
+    queryConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
     String query = "SELECT ID FROM test_insert_overwrite";
-    QueryContext context = createContext(query, conf);
+    QueryContext context = createContext(query, queryConf);
     driver.addPersistentPath(context);
     assertEquals(context.getUserQuery(), query);
     assertNotNull(context.getDriverContext().getDriverQuery(driver));
@@ -250,17 +259,17 @@ public class TestHiveDriver {
   public void testTemptable() throws Exception {
     int handleSize = getHandleSize();
     createTestTable("test_temp");
-    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
-    Hive.get(conf).dropTable("test_temp_output");
+    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
false);
+    Hive.get(hiveConf).dropTable("test_temp_output");
     String query = "CREATE TABLE test_temp_output AS SELECT ID FROM test_temp";
-    QueryContext context = createContext(query, conf);
+    QueryContext context = createContext(query, queryConf);
     LensResultSet resultSet = driver.execute(context);
     assertNull(resultSet);
     assertHandleSize(handleSize);
 
     // fetch results from temp table
     String select = "SELECT * FROM test_temp_output";
-    context = createContext(select, conf);
+    context = createContext(select, queryConf);
     resultSet = driver.execute(context);
     assertHandleSize(handleSize);
     validateInMemoryResult(resultSet, "test_temp_output");
@@ -278,22 +287,22 @@ public class TestHiveDriver {
     createTestTable("test_execute");
     LensResultSet resultSet = null;
     // Execute a select query
-    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
+    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
false);
     String select = "SELECT ID FROM test_execute";
-    QueryContext context = createContext(select, conf);
+    QueryContext context = createContext(select, queryConf);
     resultSet = driver.execute(context);
     assertNotNull(context.getDriverConf(driver).get("mapred.job.name"));
     validateInMemoryResult(resultSet);
-    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
-    context = createContext(select, conf);
+    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
true);
+    context = createContext(select, queryConf);
     resultSet = driver.execute(context);
     validatePersistentResult(resultSet, TEST_DATA_FILE, 
context.getHDFSResultDir(), false);
-    conf.set(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT,
+    queryConf.set(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT,
       "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'"
         + " WITH SERDEPROPERTIES ('serialization.null.format'='-NA-',"
         + " 'field.delim'=','  ) STORED AS TEXTFILE ");
     select = "SELECT ID, null, ID FROM test_execute";
-    context = createContext(select, conf);
+    context = createContext(select, queryConf);
     resultSet = driver.execute(context);
     validatePersistentResult(resultSet, TEST_DATA_FILE, 
context.getHDFSResultDir(), true);
     assertHandleSize(handleSize);
@@ -397,9 +406,9 @@ public class TestHiveDriver {
 
     // Now run a command that would fail
     String expectFail = "SELECT ID FROM test_execute_sync";
-    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
-    Configuration failConf = new Configuration(conf);
-    failConf.set("hive.exec.driver.run.hooks", 
FailHook.class.getCanonicalName());
+    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
true);
+    Configuration failConf = new Configuration(queryConf);
+    failConf.set("hive.exec.driver.run.hooks", FailHook.class.getName());
     QueryContext context = createContext(expectFail, failConf);
     driver.executeAsync(context);
     assertHandleSize(handleSize + 1);
@@ -409,8 +418,8 @@ public class TestHiveDriver {
     assertHandleSize(handleSize);
     // Async select query
     String select = "SELECT ID FROM test_execute_sync";
-    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
-    context = createContext(select, conf);
+    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
false);
+    context = createContext(select, queryConf);
     driver.executeAsync(context);
     assertNotNull(context.getDriverConf(driver).get("mapred.job.name"));
     assertNotNull(context.getDriverConf(driver).get("mapred.job.priority"));
@@ -419,20 +428,20 @@ public class TestHiveDriver {
     driver.closeQuery(context.getQueryHandle());
     assertHandleSize(handleSize);
 
-    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
-    context = createContext(select, conf);
+    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
true);
+    context = createContext(select, queryConf);
     driver.executeAsync(context);
     assertHandleSize(handleSize + 1);
     validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, true, false);
     driver.closeQuery(context.getQueryHandle());
     assertHandleSize(handleSize);
 
-    conf.set(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT,
+    queryConf.set(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT,
       "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'"
         + " WITH SERDEPROPERTIES ('serialization.null.format'='-NA-',"
         + " 'field.delim'=','  ) STORED AS TEXTFILE ");
     select = "SELECT ID, null, ID FROM test_execute_sync";
-    context = createContext(select, conf);
+    context = createContext(select, queryConf);
     driver.executeAsync(context);
     assertHandleSize(handleSize + 1);
     validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, true, true);
@@ -494,8 +503,10 @@ public class TestHiveDriver {
   public void testCancelAsyncQuery() throws Exception {
     int handleSize = getHandleSize();
     createTestTable("test_cancel_async");
-    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
-    QueryContext context = createContext("SELECT ID FROM test_cancel_async", 
conf);
+    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
false);
+    QueryContext context = createContext("select a.id aid, b.id bid from "
+        + "((SELECT ID FROM test_cancel_async) a full outer join (select id 
from test_cancel_async) b)",
+      queryConf);
     driver.executeAsync(context);
     driver.cancelQuery(context.getQueryHandle());
     driver.updateStatus(context);
@@ -527,10 +538,15 @@ public class TestHiveDriver {
     String path = persistentResultSet.getOutputPath();
 
     Path actualPath = new Path(path);
-    FileSystem fs = actualPath.getFileSystem(conf);
+    FileSystem fs = actualPath.getFileSystem(driverConf);
     assertEquals(actualPath, fs.makeQualified(outptuDir));
     List<String> actualRows = new ArrayList<String>();
-    for (FileStatus stat : fs.listStatus(actualPath)) {
+    for (FileStatus stat : fs.listStatus(actualPath, new PathFilter() {
+      @Override
+      public boolean accept(Path path) {
+        return !new File(path.toUri()).isDirectory();
+      }
+    })) {
       FSDataInputStream in = fs.open(stat.getPath());
       BufferedReader br = null;
       try {
@@ -579,33 +595,33 @@ public class TestHiveDriver {
   public void testPersistentResultSet() throws Exception {
     int handleSize = getHandleSize();
     createTestTable("test_persistent_result_set");
-    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
-    conf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
-    conf.set(LensConfConstants.RESULT_SET_PARENT_DIR, testOutputDir);
-    QueryContext ctx = createContext("SELECT ID FROM 
test_persistent_result_set", conf);
+    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
true);
+    queryConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
+    queryConf.set(LensConfConstants.RESULT_SET_PARENT_DIR, testOutputDir);
+    QueryContext ctx = createContext("SELECT ID FROM 
test_persistent_result_set", queryConf);
     LensResultSet resultSet = driver.execute(ctx);
     validatePersistentResult(resultSet, TEST_DATA_FILE, 
ctx.getHDFSResultDir(), false);
     assertHandleSize(handleSize);
 
-    ctx = createContext("SELECT ID FROM test_persistent_result_set", conf);
+    ctx = createContext("SELECT ID FROM test_persistent_result_set", 
queryConf);
     driver.executeAsync(ctx);
     assertHandleSize(handleSize + 1);
     validateExecuteAsync(ctx, DriverQueryState.SUCCESSFUL, true, false);
     driver.closeQuery(ctx.getQueryHandle());
     assertHandleSize(handleSize);
 
-    conf.set(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT,
+    queryConf.set(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT,
       "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'"
         + " WITH SERDEPROPERTIES ('serialization.null.format'='-NA-',"
         + " 'field.delim'=','  ) STORED AS TEXTFILE ");
-    ctx = createContext("SELECT ID, null, ID FROM test_persistent_result_set", 
conf);
+    ctx = createContext("SELECT ID, null, ID FROM test_persistent_result_set", 
queryConf);
     resultSet = driver.execute(ctx);
     assertHandleSize(handleSize);
     validatePersistentResult(resultSet, TEST_DATA_FILE, 
ctx.getHDFSResultDir(), true);
     driver.closeQuery(ctx.getQueryHandle());
     assertHandleSize(handleSize);
 
-    ctx = createContext("SELECT ID, null, ID FROM test_persistent_result_set", 
conf);
+    ctx = createContext("SELECT ID, null, ID FROM test_persistent_result_set", 
queryConf);
     driver.executeAsync(ctx);
     assertHandleSize(handleSize + 1);
     validateExecuteAsync(ctx, DriverQueryState.SUCCESSFUL, true, true);
@@ -640,7 +656,7 @@ public class TestHiveDriver {
   public void testEstimateNativeQuery() throws Exception {
     createTestTable("test_estimate");
     SessionState.setCurrentSessionState(ss);
-    QueryCost cost = driver.estimate(createExplainContext("SELECT ID FROM 
test_estimate", conf));
+    QueryCost cost = driver.estimate(createExplainContext("SELECT ID FROM 
test_estimate", queryConf));
     assertEquals(cost.getEstimatedResourceUsage(), Double.MAX_VALUE);
     cost.getEstimatedExecTimeMillis();
 
@@ -649,7 +665,7 @@ public class TestHiveDriver {
   @Test(expectedExceptions = {UnsupportedOperationException.class})
   public void testEstimateOlapQuery() throws Exception {
     SessionState.setCurrentSessionState(ss);
-    ExplainQueryContext ctx = createExplainContext("cube SELECT ID FROM 
test_cube", conf);
+    ExplainQueryContext ctx = createExplainContext("cube SELECT ID FROM 
test_cube", queryConf);
     ctx.setOlapQuery(true);
     ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() 
{
       @Override
@@ -676,11 +692,10 @@ public class TestHiveDriver {
   public void testExplainNativeFailingQuery() throws Exception {
     SessionState.setCurrentSessionState(ss);
     try {
-      driver.estimate(createExplainContext("SELECT ID FROM nonexist", conf));
+      driver.estimate(createExplainContext("SELECT ID FROM nonexist", 
queryConf));
       fail("Should not reach here");
     } catch (LensException e) {
-      assertTrue(LensUtil.getCauseMessage(e).contains("Error while"
-        + " compiling statement: FAILED: SemanticException [Error 10001]: Line 
1:32 Table not found 'nonexist'"));
+      assertTrue(LensUtil.getCauseMessage(e).contains("Line 1:32 Table not 
found 'nonexist'"));
     }
   }
 
@@ -697,18 +712,18 @@ public class TestHiveDriver {
     SessionState.setCurrentSessionState(ss);
     SessionState.get().setCurrentDatabase(dataBase);
     createTestTable("test_explain");
-    DriverQueryPlan plan = driver.explain(createExplainContext("SELECT ID FROM 
test_explain", conf));
+    DriverQueryPlan plan = driver.explain(createExplainContext("SELECT ID FROM 
test_explain", queryConf));
     assertTrue(plan instanceof HiveQueryPlan);
     assertEquals(plan.getTableWeight(dataBase + ".test_explain"), 500.0);
     assertHandleSize(handleSize);
 
     // test execute prepare
-    PreparedQueryContext pctx = new PreparedQueryContext("SELECT ID FROM 
test_explain", null, conf, drivers);
+    PreparedQueryContext pctx = new PreparedQueryContext("SELECT ID FROM 
test_explain", null, queryConf, drivers);
     pctx.setSelectedDriver(driver);
     pctx.setLensSessionIdentifier(sessionid);
 
     SessionState.setCurrentSessionState(ss);
-    HiveConf inConf = new HiveConf(conf);
+    Configuration inConf = new Configuration(queryConf);
     inConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
false);
     plan = driver.explainAndPrepare(pctx);
     QueryContext qctx = createContext(pctx, inConf);
@@ -717,8 +732,8 @@ public class TestHiveDriver {
     validateInMemoryResult(result);
 
     // test execute prepare async
-    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
-    qctx = createContext(pctx, conf);
+    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
true);
+    qctx = createContext(pctx, queryConf);
     driver.executeAsync(qctx);
     assertNotNull(qctx.getDriverOpHandle());
     validateExecuteAsync(qctx, DriverQueryState.SUCCESSFUL, true, false);
@@ -735,7 +750,7 @@ public class TestHiveDriver {
     assertHandleSize(handleSize);
     validateInMemoryResult(result);
     // test execute prepare async
-    qctx = createContext(pctx, conf);
+    qctx = createContext(pctx, queryConf);
     qctx.setQueryHandle(new 
QueryHandle(pctx.getPrepareHandle().getPrepareHandleId()));
     driver.executeAsync(qctx);
     assertHandleSize(handleSize + 1);
@@ -757,7 +772,7 @@ public class TestHiveDriver {
     createPartitionedTable("test_part_table");
     // acquire
     SessionState.setCurrentSessionState(ss);
-    DriverQueryPlan plan = driver.explain(createExplainContext("SELECT ID FROM 
test_part_table", conf));
+    DriverQueryPlan plan = driver.explain(createExplainContext("SELECT ID FROM 
test_part_table", queryConf));
     assertHandleSize(handleSize);
     assertTrue(plan instanceof HiveQueryPlan);
     assertNotNull(plan.getTablesQueried());
@@ -783,8 +798,9 @@ public class TestHiveDriver {
     createTestTable("explain_test_2");
     SessionState.setCurrentSessionState(ss);
     DriverQueryPlan plan = driver.explain(createExplainContext("SELECT 
explain_test_1.ID, count(1) FROM "
-      + " explain_test_1  join explain_test_2 on explain_test_1.ID = 
explain_test_2.ID"
-      + " WHERE explain_test_1.ID = 'foo' or explain_test_2.ID = 'bar'" + " 
GROUP BY explain_test_1.ID", conf));
+        + " explain_test_1  join explain_test_2 on explain_test_1.ID = 
explain_test_2.ID"
+        + " WHERE explain_test_1.ID = 'foo' or explain_test_2.ID = 'bar'" + " 
GROUP BY explain_test_1.ID",
+      queryConf));
 
     assertHandleSize(handleSize);
     assertTrue(plan instanceof HiveQueryPlan);
@@ -806,10 +822,10 @@ public class TestHiveDriver {
   public void testExplainOutputPersistent() throws Exception {
     int handleSize = getHandleSize();
     createTestTable("explain_test_1");
-    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
+    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
true);
     SessionState.setCurrentSessionState(ss);
     String query2 = "SELECT DISTINCT ID FROM explain_test_1";
-    PreparedQueryContext pctx = new PreparedQueryContext(query2, null, conf, 
drivers);
+    PreparedQueryContext pctx = createPreparedQueryContext(query2);
     pctx.setSelectedDriver(driver);
     pctx.setLensSessionIdentifier(sessionid);
     DriverQueryPlan plan2 = driver.explainAndPrepare(pctx);
@@ -818,7 +834,7 @@ public class TestHiveDriver {
     assertNotNull(plan2.getTablesQueried());
     assertEquals(plan2.getTablesQueried().size(), 1);
     assertTrue(plan2.getTableWeights().containsKey(dataBase + 
".explain_test_1"));
-    QueryContext ctx = createContext(pctx, conf);
+    QueryContext ctx = createContext(pctx, queryConf);
     LensResultSet resultSet = driver.execute(ctx);
     assertHandleSize(handleSize);
     HivePersistentResultSet persistentResultSet = (HivePersistentResultSet) 
resultSet;
@@ -827,6 +843,13 @@ public class TestHiveDriver {
     driver.closeQuery(plan2.getHandle());
   }
 
+  private PreparedQueryContext createPreparedQueryContext(String query2) {
+    PreparedQueryContext pctx = new PreparedQueryContext(query2, null, 
queryConf, drivers);
+    pctx.setSelectedDriver(driver);
+    pctx.setLensSessionIdentifier(sessionid);
+    return pctx;
+  }
+
   @DataProvider
   public Object[][] priorityDataProvider() throws IOException, ParseException {
     BufferedReader br = new BufferedReader(new InputStreamReader(
@@ -891,7 +914,7 @@ public class TestHiveDriver {
   @Test
   public void testPriorityWithoutFactPartitions() throws LensException {
     // test priority without fact partitions
-    QueryContext ctx = createContext("test priority query", conf);
+    QueryContext ctx = createContext("test priority query", queryConf);
     ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() 
{
 
       @Override
@@ -918,11 +941,11 @@ public class TestHiveDriver {
         }
       });
     ctx.setDriverCost(driver, driver.queryCostCalculator.calculateCost(ctx, 
driver));
-    assertEquals(driver.decidePriority(ctx), Priority.VERY_HIGH);
-    
assertEquals(alwaysNormalPriorityDecider.decidePriority(ctx.getDriverQueryCost(driver)),
 Priority.NORMAL);
+    assertEquals(driver.decidePriority(ctx, driver.queryPriorityDecider), 
Priority.VERY_HIGH);
+    assertEquals(driver.decidePriority(ctx, alwaysNormalPriorityDecider), 
Priority.NORMAL);
 
     // test priority without rewriter plan
-    ctx = createContext("test priority query", conf);
+    ctx = createContext("test priority query", queryConf);
     ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() 
{
       @Override
       public String getPlan() {

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
----------------------------------------------------------------------
diff --git 
a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
 
b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
index 765bb45..00d2a1c 100644
--- 
a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
+++ 
b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
@@ -28,13 +28,16 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.lens.api.query.QueryHandle;
 import org.apache.lens.server.api.LensConfConstants;
+import org.apache.lens.server.api.driver.DriverQueryHook;
 import org.apache.lens.server.api.driver.DriverQueryPlan;
 import org.apache.lens.server.api.driver.DriverQueryStatus.DriverQueryState;
 import org.apache.lens.server.api.driver.LensDriver;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.query.QueryContext;
+import org.apache.lens.server.api.user.MockDriverQueryHook;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.service.Service;
@@ -65,8 +68,8 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
 
   /** The remote conf. */
 
-  private static HiveConf remoteConf = new HiveConf();
-
+  private static Configuration remoteConf = new Configuration();
+  private static HiveConf hiveConf;
   /**
    * Setup test.
    *
@@ -86,16 +89,18 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
   public static void createHS2Service() throws Exception {
     remoteConf.setClass(HiveDriver.HIVE_CONNECTION_CLASS, 
RemoteThriftConnection.class, ThriftConnection.class);
     remoteConf.set("hive.lock.manager", 
"org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
-    remoteConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, 
HS2_HOST);
-    remoteConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT, HS2_PORT);
-    
remoteConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_CONNECTION_RETRY_LIMIT,
 3);
-    
remoteConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_RETRY_LIMIT, 
3);
-    
remoteConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_RETRY_DELAY_SECONDS,
 10);
-    
remoteConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT,
 1);
-    remoteConf.setIntVar(HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT, 60000);
+    HiveConf.setVar(remoteConf, 
HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, HS2_HOST);
+    HiveConf.setIntVar(remoteConf, HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT, 
HS2_PORT);
+    HiveConf.setIntVar(remoteConf, 
HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_CONNECTION_RETRY_LIMIT, 3);
+    HiveConf.setIntVar(remoteConf, 
HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_RETRY_LIMIT, 3);
+    HiveConf.setVar(remoteConf, 
HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_RETRY_DELAY_SECONDS, "10s");
+    HiveConf.setVar(remoteConf, 
HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT, "1s");
+    HiveConf.setVar(remoteConf, HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT, 
"60000s");
     remoteConf.setLong(HiveDriver.HS2_CONNECTION_EXPIRY_DELAY, 10000);
     server = new HiveServer2();
-    server.init(remoteConf);
+    hiveConf = new HiveConf();
+    hiveConf.addResource(remoteConf);
+    server.init(hiveConf);
     server.start();
     // TODO figure out a better way to wait for thrift service to start
     Thread.sleep(7000);
@@ -130,11 +135,12 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
 
   protected void createDriver() throws LensException {
     dataBase = TestRemoteHiveDriver.class.getSimpleName().toLowerCase();
-    conf = new HiveConf(remoteConf);
-    conf.addResource("drivers/hive/hive1/hivedriver-site.xml");
+    driverConf = new Configuration(remoteConf);
+    driverConf.addResource("drivers/hive/hive1/hivedriver-site.xml");
     driver = new HiveDriver();
-    conf.setBoolean(HiveDriver.HS2_CALCULATE_PRIORITY, true);
-    driver.configure(conf, "hive", "hive1");
+    driverConf.setBoolean(HiveDriver.HS2_CALCULATE_PRIORITY, true);
+    driverConf.setClass(HiveDriver.HIVE_QUERY_HOOK_CLASS, 
MockDriverQueryHook.class, DriverQueryHook.class);
+    driver.configure(driverConf, "hive", "hive1");
     drivers = Lists.<LensDriver>newArrayList(driver);
     System.out.println("TestRemoteHiveDriver created");
   }
@@ -147,13 +153,15 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
   @Test
   public void testMultiThreadClient() throws Exception {
     log.info("@@ Starting multi thread test");
+    SessionState.get().setCurrentDatabase(dataBase);
+    final SessionState state = SessionState.get();
     // Launch two threads
     createTestTable("test_multithreads");
-    HiveConf thConf = new HiveConf(conf, TestRemoteHiveDriver.class);
+    Configuration thConf = new Configuration(driverConf);
     thConf.setLong(HiveDriver.HS2_CONNECTION_EXPIRY_DELAY, 10000);
     final HiveDriver thrDriver = new HiveDriver();
     thrDriver.configure(thConf, "hive", "hive1");
-    QueryContext ctx = createContext("USE " + dataBase, conf, thrDriver);
+    QueryContext ctx = createContext("USE " + dataBase, queryConf, thrDriver);
     thrDriver.execute(ctx);
 
     // Launch a select query
@@ -166,7 +174,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
     for (int q = 0; q < QUERIES; q++) {
       final QueryContext qctx;
       try {
-        qctx = createContext("SELECT * FROM test_multithreads", conf, 
thrDriver);
+        qctx = createContext("SELECT * FROM test_multithreads", queryConf, 
thrDriver);
         thrDriver.executeAsync(qctx);
       } catch (LensException e) {
         errCount.incrementAndGet();
@@ -177,12 +185,12 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
       launchedQueries++;
       // Launch many threads to poll for status
       final QueryHandle handle = qctx.getQueryHandle();
-
       for (int i = 0; i < THREADS; i++) {
         int thid = q * THREADS + i;
         Thread th = new Thread(new Runnable() {
           @Override
           public void run() {
+            SessionState.setCurrentSessionState(state);
             for (int i = 0; i < 1000; i++) {
               try {
                 thrDriver.updateStatus(qctx);
@@ -231,7 +239,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
   @Test
   public void testHiveDriverPersistence() throws Exception {
     System.out.println("@@@@ start_persistence_test");
-    HiveConf driverConf = new HiveConf(remoteConf, TestRemoteHiveDriver.class);
+    Configuration driverConf = new Configuration(remoteConf);
     driverConf.addResource("drivers/hive/hive1/hivedriver-site.xml");
     driverConf.setLong(HiveDriver.HS2_CONNECTION_EXPIRY_DELAY, 10000);
     driverConf.setBoolean(HiveDriver.HS2_CALCULATE_PRIORITY, false);
@@ -239,9 +247,9 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
     final HiveDriver oldDriver = new HiveDriver();
     oldDriver.configure(driverConf, "hive", "hive1");
 
-    driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
-    driverConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
-    QueryContext ctx = createContext("USE " + dataBase, driverConf, oldDriver);
+    queryConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
+    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
+    QueryContext ctx = createContext("USE " + dataBase, queryConf, oldDriver);
     oldDriver.execute(ctx);
     Assert.assertEquals(0, oldDriver.getHiveHandleSize());
 
@@ -249,20 +257,20 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
 
     // Create some ops with a driver
     String createTable = "CREATE TABLE IF NOT EXISTS " + tableName + "(ID 
STRING)";
-    ctx = createContext(createTable, driverConf, oldDriver);
+    ctx = createContext(createTable, queryConf, oldDriver);
     oldDriver.execute(ctx);
 
     // Load some data into the table
     String dataLoad = "LOAD DATA LOCAL INPATH '" + TEST_DATA_FILE + "' 
OVERWRITE INTO TABLE " + tableName;
-    ctx = createContext(dataLoad, driverConf, oldDriver);
+    ctx = createContext(dataLoad, queryConf, oldDriver);
     oldDriver.execute(ctx);
 
-    driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
-    driverConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
true);
+    queryConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
+    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
true);
     // Fire two queries
-    QueryContext ctx1 = createContext("SELECT * FROM " + tableName, driverConf, oldDriver);
+    QueryContext ctx1 = createContext("SELECT * FROM " + tableName, queryConf, oldDriver);
     oldDriver.executeAsync(ctx1);
-    QueryContext ctx2 = createContext("SELECT ID FROM " + tableName, driverConf, oldDriver);
+    QueryContext ctx2 = createContext("SELECT ID FROM " + tableName, queryConf, oldDriver);
     oldDriver.executeAsync(ctx2);
     Assert.assertEquals(2, oldDriver.getHiveHandleSize());
 
@@ -330,13 +338,14 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
    * @throws IOException            Signals that an I/O exception has occurred.
    * @throws ClassNotFoundException the class not found exception
    */
-  private QueryContext readContext(byte[] bytes, LensDriver driver) throws 
IOException, ClassNotFoundException {
+  private QueryContext readContext(byte[] bytes, LensDriver driver) throws 
IOException,
+    ClassNotFoundException {
     ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
     ObjectInputStream in = new ObjectInputStream(bais);
     QueryContext ctx;
     try {
       ctx = (QueryContext) in.readObject();
-      ctx.setConf(driver.getConf());
+      ctx.setConf(queryConf);
       boolean driverAvailable = in.readBoolean();
       if (driverAvailable) {
         String driverQualifiedName = in.readUTF();
@@ -357,11 +366,11 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
    * @throws Exception the exception
    */
   private void createPartitionedTable(String tableName, int partitions) throws 
Exception {
-    conf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
-    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
+    queryConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
+    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, 
false);
 
     QueryContext ctx = createContext("CREATE EXTERNAL TABLE IF NOT EXISTS " + 
tableName
-      + " (ID STRING) PARTITIONED BY (DT STRING, ET STRING)", conf);
+      + " (ID STRING) PARTITIONED BY (DT STRING, ET STRING)", queryConf);
 
     driver.execute(ctx);
     Assert.assertEquals(0, driver.getHiveHandleSize());
@@ -383,7 +392,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
 
       System.out.println("@@ Adding partition " + i);
       QueryContext partCtx = createContext("ALTER TABLE " + tableName + " ADD 
IF NOT EXISTS PARTITION (DT='p" + i
-        + "', ET='1') LOCATION '" + partDir.getPath() + "'", conf);
+        + "', ET='1') LOCATION '" + partDir.getPath() + "'", queryConf);
       driver.execute(partCtx);
     }
   }
@@ -406,7 +415,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
       + "AND table_1.ET='1'";
 
     SessionState.setCurrentSessionState(ss);
-    DriverQueryPlan plan = driver.explain(createExplainContext(explainQuery, 
conf));
+    DriverQueryPlan plan = driver.explain(createExplainContext(explainQuery, 
queryConf));
 
     Assert.assertEquals(0, driver.getHiveHandleSize());
     System.out.println("@@ partitions" + plan.getPartitions());

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-hive/src/test/resources/hive-site.xml
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/resources/hive-site.xml 
b/lens-driver-hive/src/test/resources/hive-site.xml
index de5af50..67fe744 100644
--- a/lens-driver-hive/src/test/resources/hive-site.xml
+++ b/lens-driver-hive/src/test/resources/hive-site.xml
@@ -36,6 +36,11 @@
   </property>
 
   <property>
+    <name>hive.exec.scratchdir</name>
+    <value>${project.build.directory}/hive/scratch</value>
+  </property>
+
+  <property>
     <name>mapreduce.framework.name</name>
     <value>local</value>
   </property>
@@ -56,4 +61,9 @@
     <value>target/query_logs</value>
   </property>
 
+  <property>
+    <name>datanucleus.schema.autoCreateTables</name>
+    <value>true</value>
+  </property>
+
 </configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-jdbc/pom.xml
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/pom.xml b/lens-driver-jdbc/pom.xml
index 0dd0fb5..fd1f78f 100644
--- a/lens-driver-jdbc/pom.xml
+++ b/lens-driver-jdbc/pom.xml
@@ -73,9 +73,17 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
       <artifactId>hive-service</artifactId>
     </dependency>
     <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+    </dependency>
+    <dependency>
       <groupId>commons-lang</groupId>
       <artifactId>commons-lang</artifactId>
     </dependency>
@@ -90,6 +98,22 @@
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>datanucleus-api-jdo</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>datanucleus-rdbms</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.derby</groupId>
+      <artifactId>derby</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>javax.jdo</artifactId>
+    </dependency>
   </dependencies>
   <build>
     <plugins>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
----------------------------------------------------------------------
diff --git 
a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
 
b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
index a004de9..34312c0 100644
--- 
a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
+++ 
b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
@@ -204,7 +204,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
 
     QB qb = new QB(null, null, false);
 
-    if (!c1.doPhase1(ast, qb, c1.initPhase1Ctx())) {
+    if (!c1.doPhase1(ast, qb, c1.initPhase1Ctx(), null)) {
       return;
     }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCResultSet.java
----------------------------------------------------------------------
diff --git 
a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCResultSet.java 
b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCResultSet.java
index 9e1a0c0..a0cedf4 100644
--- 
a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCResultSet.java
+++ 
b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCResultSet.java
@@ -32,9 +32,9 @@ import 
org.apache.lens.server.api.driver.LensResultSetMetadata;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.serde2.thrift.Type;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hive.service.cli.ColumnDescriptor;
-import org.apache.hive.service.cli.Type;
 import org.apache.hive.service.cli.TypeDescriptor;
 import org.apache.hive.service.cli.TypeQualifiers;
 
@@ -164,7 +164,9 @@ public class JDBCResultSet extends InMemoryResultSet {
       List<ColumnDescriptor> columns = new 
ArrayList<ColumnDescriptor>(fieldSchemas.size());
 
       for (int i = 0; i < fieldSchemas.size(); i++) {
-        columns.add(new ColumnDescriptor(fieldSchemas.get(i).toFieldSchema(), 
i + 1));
+        FieldSchema schema = fieldSchemas.get(i).toFieldSchema();
+        
columns.add(ColumnDescriptor.newPrimitiveColumnDescriptor(schema.getName(), 
schema.getComment(),
+          Type.getType(schema.getType()), i + 1));
       }
       return columns;
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
----------------------------------------------------------------------
diff --git 
a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
 
b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
index e71b507..12fa6f0 100644
--- 
a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
+++ 
b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
@@ -137,7 +137,8 @@ public class TestColumnarSQLRewriter {
     conf.addResource("jdbcdriver-default.xml");
     conf.addResource("drivers/jdbc/jdbc1/jdbcdriver-site.xml");
     qtest.init(conf);
-
+    hconf.addResource(conf);
+    SessionState.start(hconf);
     List<FieldSchema> factColumns = new ArrayList<>();
     factColumns.add(new FieldSchema("item_key", "int", ""));
     factColumns.add(new FieldSchema("branch_key", "int", ""));
@@ -203,8 +204,6 @@ public class TestColumnarSQLRewriter {
   // Testing multiple queries in one instance
   public void testNoRewrite() throws LensException {
 
-    SessionState.start(hconf);
-
     String query = "select count(distinct id) from location_dim";
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select count( distinct  id ) from location_dim ";
@@ -243,8 +242,6 @@ public class TestColumnarSQLRewriter {
       + "on fact.other_location_key = other_location_dim.location_key where 
time_dim.time_key "
       + "between '2013-01-01' and '2013-01-31' and location_dim_a.location_key 
= 'some-loc' "
       + "group by fact.time_key, location_dim_a.location_key, 
other_location_dim.location_key";
-
-    SessionState.start(hconf);
     qtest.rewrite(query, conf, hconf);
     String expected = "sales_fact___fact.time_key in  (  select time_dim 
.time_key from time_dim "
       + "where (time_dim.time_key) between '2013-01-01' and '2013-01-31' ) and 
"
@@ -270,9 +267,6 @@ public class TestColumnarSQLRewriter {
         + "and location_dim.location_name = 'test123' "
         + "where time_dim.time_key between '2013-01-01' and '2013-01-31' "
         + "group by fact.time_key,time_dim.day_of_week,time_dim.day " + "order 
by dollars_sold desc ";
-
-    SessionState.start(hconf);
-
     String rwq = qtest.rewrite(query, conf, hconf);
     String expected = "inner join (select time_key, day_of_week, day from 
time_dim) time_dim___time_dim "
       + "on (( sales_fact___fact . time_key ) = "
@@ -302,9 +296,6 @@ public class TestColumnarSQLRewriter {
         + "and location_dim.location_name = 'test123' "
         + "where time_dim.time_key between '2013-01-01' and '2013-01-31' "
         + "group by fact.time_key,time_dim.day_of_week,time_dim.day " + "order 
by dollars_sold desc ";
-
-    SessionState.start(hconf);
-
     String rwq = qtest.rewrite(query, conf, hconf);
     String expected = "[(( location_dim___location_dim . location_name ) =  "
       + "'test123' ), , , ( time_dim___time_dim . time_key ) between  
'2013-01-01'  and  '2013-01-31' "
@@ -334,8 +325,6 @@ public class TestColumnarSQLRewriter {
         + "where time_dim.time_key between '2013-01-01' and '2013-01-31' "
         + "group by fact.time_key,time_dim.day_of_week,time_dim.day " + "order 
by dollars_sold desc ";
 
-    SessionState.start(hconf);
-
     String rwq = qtest.rewrite(query, conf, hconf);
     String actual = qtest.aggColumn.toString();
     String expected = "[sum((sales_fact___fact.dollars_sold)) as alias1, "
@@ -365,9 +354,6 @@ public class TestColumnarSQLRewriter {
         + "and location_dim.location_name = 'test123' "
         + "where time_dim.time_key between '2013-01-01' and '2013-01-31' "
         + "group by 
fact.time_key,time_dim.day_of_week,time_dim.day,item_dim.item_key " + "order by 
dollars_sold desc ";
-
-    SessionState.start(hconf);
-
     String rwq = qtest.rewrite(query, conf, hconf);
     String expected = 
"sales_fact___fact.time_key,sales_fact___fact.location_key,sales_fact___fact.item_key,";
     String actual = qtest.factKeys.toString();
@@ -393,9 +379,6 @@ public class TestColumnarSQLRewriter {
         + "and location_dim.location_name = 'test123' "
         + "where time_dim.time_key between '2013-01-01' and '2013-01-31' " + 
"and item_dim.item_name = 'item_1' "
         + "group by 
fact.time_key,time_dim.day_of_week,time_dim.day,item_dim.item_key " + "order by 
dollars_sold desc ";
-
-    SessionState.start(hconf);
-
     String rwq = qtest.rewrite(query, conf, hconf);
     String expected = "sales_fact___fact.time_key in  (  select time_dim 
.time_key from time_dim where "
       + "( time_dim. time_key ) between  '2013-01-01'  and  '2013-01-31'  ) 
and sales_fact___fact.location_key in  "
@@ -429,9 +412,6 @@ public class TestColumnarSQLRewriter {
         + "and item_dim.item_name = 'item_1' "
         + "group by 
fact.time_key,time_dim.day_of_week,time_dim.day,item_dim.item_key "
         + "order by dollars_sold  ";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
 
     String expected = "select ( sales_fact___fact . time_key ), ( 
time_dim___time_dim . day_of_week ), "
@@ -499,9 +479,6 @@ public class TestColumnarSQLRewriter {
         + "and location_dim.location_name = 'test123' "
         + "where time_dim.time_key between '2013-03-01' and '2013-03-05' "
         + "group by fact.time_key,time_dim.day_of_week,time_dim.day " + "order 
by dollars_sold ";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( sales_fact___fact . time_key ), ( 
time_dim___time_dim . day_of_week ), "
             + "( time_dim___time_dim . day ),  case  when (sum(alias2) =  0 ) 
then  0.0  "
@@ -567,9 +544,6 @@ public class TestColumnarSQLRewriter {
     String query = "SELECT  distinct ( location_dim . id ) FROM location_dim "
       + "location_dim join time_dim time_dim on location_dim.time_id = 
time_dim.id "
       + "WHERE ( time_dim . full_date ) between  '2013-01-01 00:00:00'  and  
'2013-01-04 00:00:00'  LIMIT 10 ";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select  distinct ( location_dim___location_dim . id ) "
       + "from location_dim location_dim___location_dim  "
@@ -593,9 +567,6 @@ public class TestColumnarSQLRewriter {
         + "and location_dim.location_name = 'test123' "
         + "where time_dim.time_key between '2013-01-01' and '2013-01-31' " + 
"and item_dim.item_name = 'item_1' "
         + "group by 
fact.time_key,time_dim.day_of_week,time_dim.day,item_dim.item_key " + "order by 
dollars_sold desc ";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( sales_fact___fact . time_key ), ( 
time_dim___time_dim . day_of_week ), "
             + "( time_dim___time_dim . day ), ( item_dim___item_dim . item_key 
), sum(alias1) as `dollars_sold` , "
@@ -646,9 +617,6 @@ public class TestColumnarSQLRewriter {
         + "where time_dim.time_key between '2013-01-01' and '2013-01-31' " + 
"and item_dim.item_name = 'item_1' "
         + "group by 
fact.time_key,time_dim.day_of_week,time_dim.day,item_dim.item_key "
         + "order by dollars_sold desc ";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( sales_fact___fact . time_key ), ( 
time_dim___time_dim . day_of_week ), "
             + "( time_dim___time_dim . day ), ( item_dim___item_dim . item_key 
), "
@@ -696,9 +664,6 @@ public class TestColumnarSQLRewriter {
         + "where time_dim.time_key between date_add('2013-01-01', 1) and 
date_sub('2013-01-31',3) "
         + "and item_dim.item_name = 'item_1'  group by 
fact.time_key,time_dim.day_of_week,time_dim.day,"
         + "item_dim.item_key " + "order by dollars_sold";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select max(alias1) from  (select 
sales_fact___fact.time_key, sales_fact___fact.location_key, "
         + "sales_fact___fact.item_key,max(( sales_fact___fact . dollars_sold 
)) as alias1 from sales_fact "
@@ -732,9 +697,6 @@ public class TestColumnarSQLRewriter {
     String query = "SELECT  count(location_dim.name) FROM location_dim "
         + "location_dim join time_dim time_dim on location_dim.time_id = 
time_dim.id "
         + "WHERE ( time_dim . full_date ) between  '2013-01-01 00:00:00'  and  
'2013-01-04 00:00:00'  LIMIT 10 ";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select sum(alias1) from  (select 
location_dim___location_dim.time_id,"
         + "count(( location_dim___location_dim . name )) as alias1 from 
location_dim location_dim___location_dim "
@@ -756,9 +718,6 @@ public class TestColumnarSQLRewriter {
         + "inner join db.location_dim ld on fact.location_key = 
ld.location_key " + "and ld.location_name = 'test123' "
         + "where time_dim.time_key between '2013-01-01' and '2013-01-31' "
         + "group by fact.time_key,time_dim.day_of_week,time_dim.day " + "order 
by dollars_sold desc ";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( sales_fact__db_sales_fact_fact . time_key ), ( 
time_dim___time_dim . day_of_week ), "
             + "( time_dim___time_dim . day ),  case  when (sum(alias2) =  0 ) "
@@ -795,9 +754,6 @@ public class TestColumnarSQLRewriter {
         + "INNER JOIN dim3 dim3 ON f.dim3_id = dim3.id " + "INNER JOIN dim4 
dim4 ON  dim2.id_2 = dim4.id_2 "
         + "WHERE ((dim1 . date) = '2014-11-25 00:00:00') "
         + "GROUP BY (dim1 . date),  (dim2 . name), (dim3 . name) , (dim4 . 
name) ";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( dim1___dim1 . date ) as `date` , sum(alias1) 
as `msr1` , ( dim2___dim2 . name ) "
         + "as `dim2_name` , "
@@ -827,9 +783,6 @@ public class TestColumnarSQLRewriter {
         + "INNER JOIN dim2 dim2 ON f.dim2_id = dim2.id  and f.m3 > 3000 "
         + "WHERE ((dim1 . date) = '2014-11-25 00:00:00')  and f.m4  is not 
null "
         + "GROUP BY (dim1 . date),  (dim2 . name)";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( dim1___dim1 . date ) as `date` , sum(alias1) 
as `msr1` , ( dim2___dim2 . name ) "
         + "as `dim2_name`  from  (select fact___f.dim1_id, fact___f.m2, 
fact___f.dim2_id, fact___f.m3, fact___f.m4, "
@@ -855,9 +808,6 @@ public class TestColumnarSQLRewriter {
         + "INNER JOIN dim2 dim2 ON f.dim2_id = dim2.id  and f.m3 > 3000 "
         + "WHERE ((dim1 . date) = '2014-11-25 00:00:00')  and f.m4  is not 
null "
         + "GROUP BY (dim1 . date),  (dim2 . name) ORDER BY dim1_date";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
 
     String expected = "select ( dim1___dim1 . date ) as `dim1_date` , 
sum(alias1) as `msr1` , "
@@ -885,9 +835,6 @@ public class TestColumnarSQLRewriter {
         + "INNER JOIN dim2 dim2 ON f.dim2_id = dim2.id  and f.dim3_id = 
dim2.id "
         + "WHERE ((dim1 . date) = '2014-11-25 00:00:00')  and f.m4  is not 
null "
         + "GROUP BY (dim1 . date),  (dim2 . name) ORDER BY dim1_date";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( dim1___dim1 . date ) as `dim1_date` , 
sum(alias1) as `msr1` , "
         + "( dim2___dim2 . name ) as `dim2_name`  from  (select 
fact___f.dim1_id, fact___f.m2, fact___f.dim2_id,"
@@ -918,9 +865,6 @@ public class TestColumnarSQLRewriter {
             + "where time_dim.time_key between '2013-01-01' and '2013-01-31' " 
+ "and item_dim.item_name = 'item_1' "
             + "group by 
fact.time_key,time_dim.day_of_week,time_dim.day,item_dim.item_key "
             + "order by dollars_sold desc ";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( sales_fact___fact . time_key ), ( 
time_dim___time_dim . day_of_week ), "
             + "( time_dim___time_dim . day ), ( item_dim___item_dim . item_key 
),  "
@@ -969,9 +913,6 @@ public class TestColumnarSQLRewriter {
             + "and ld.location_name = 'test123' where time_dim.time_key 
between '2013-01-01' and '2013-01-31' "
             + "group by fact.time_key,time_dim.day_of_week,time_dim.day "
             + "order by dollars_sold desc";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( sales_fact__db_sales_fact_fact . time_key ), ( 
time_dim___time_dim . day_of_week ), "
             + "( time_dim___time_dim . day ), ((sum(alias1) +  0 ) + 
(sum(alias2) +  0 )) as `expr1` , "
@@ -1017,9 +958,6 @@ public class TestColumnarSQLRewriter {
             + "ld.location_name = 'test123' where time_dim.time_key between 
'2013-01-01' and "
             + "'2013-01-31' group by 
fact.time_key,time_dim.day_of_week,time_dim.day  "
             + "order by dollars_sold desc";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( sales_fact__db_sales_fact_fact . time_key ), ( 
time_dim___time_dim . day_of_week ), "
             + "( time_dim___time_dim . day ), ((sum(( 
sales_fact__db_sales_fact_fact . item_count )) +  0 ) + "
@@ -1059,9 +997,6 @@ public class TestColumnarSQLRewriter {
             + " and ld.location_name in ('test\\'123') "
             + "group by fact.time_key,time_dim.day_of_week,time_dim.day "
             + "order by dollars_sold desc";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( sales_fact__db_sales_fact_fact . time_key ), ( 
time_dim___time_dim . day_of_week ),"
             + " ( time_dim___time_dim . day ), ((sum(alias1) +  0 ) + 
(sum(alias2) +  0 )) as `expr1` , "
@@ -1106,9 +1041,6 @@ public class TestColumnarSQLRewriter {
             + "where time_dim.time_key between '2013-01-01' and '2013-01-31'"
             + "group by fact.time_key "
             + "having sum(fact.dollar_sold) > 100 ";
-
-    SessionState.start(hconf);
-
     String actual = qtest.rewrite(query, conf, hconf);
     String expected = "select ( sales_fact__db_sales_fact_fact . time_key ) as 
`time_key` , "
             + "( time_dim___time_dim . day_of_week ), sum(alias1) as 
`total_item_sold`  from "
@@ -1143,8 +1075,8 @@ public class TestColumnarSQLRewriter {
     serdeUrls[1] = new URL("file:" + serdeJarFile.getAbsolutePath());
 
     URLClassLoader createTableClassLoader = new URLClassLoader(serdeUrls, 
hconf.getClassLoader());
-    hconf.setClassLoader(createTableClassLoader);
-    SessionState.start(hconf);
+    ClassLoader loader = new URLClassLoader(serdeUrls, 
SessionState.getSessionConf().getClassLoader());
+    SessionState.getSessionConf().setClassLoader(loader);
 
     // Create test table
     Database database = new Database();
@@ -1238,7 +1170,6 @@ public class TestColumnarSQLRewriter {
    */
   @Test
   public void testReplaceColumnMapping() throws Exception {
-    SessionState.start(hconf);
     String testDB = "testrcm";
 
     // Create test table

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-driver-jdbc/src/test/resources/hive-site.xml
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/resources/hive-site.xml 
b/lens-driver-jdbc/src/test/resources/hive-site.xml
index 2cb1d8d..b0dfc6e 100644
--- a/lens-driver-jdbc/src/test/resources/hive-site.xml
+++ b/lens-driver-jdbc/src/test/resources/hive-site.xml
@@ -36,9 +36,24 @@
   </property>
 
   <property>
+    <name>hive.exec.scratchdir</name>
+    <value>${project.build.directory}/hive/scratch</value>
+  </property>
+
+  <property>
     <name>javax.jdo.option.ConnectionURL</name>
     <value>jdbc:derby:;databaseName=target/metastore_db;create=true</value>
     <description>JDBC connect string for a JDBC metastore</description>
   </property>
 
+  <property>
+    <name>datanucleus.schema.autoCreateTables</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.support.sql11.reserved.keywords</name>
+    <value>false</value>
+  </property>
+
 </configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-examples/pom.xml
----------------------------------------------------------------------
diff --git a/lens-examples/pom.xml b/lens-examples/pom.xml
index a13ee33..4144213 100644
--- a/lens-examples/pom.xml
+++ b/lens-examples/pom.xml
@@ -53,7 +53,6 @@
       <artifactId>lens-client</artifactId>
       <version>${project.version}</version>
     </dependency>
-
     <dependency>
       <groupId>commons-lang</groupId>
       <artifactId>commons-lang</artifactId>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-ml-lib/pom.xml
----------------------------------------------------------------------
diff --git a/lens-ml-lib/pom.xml b/lens-ml-lib/pom.xml
index 0472b70..152d7c3 100644
--- a/lens-ml-lib/pom.xml
+++ b/lens-ml-lib/pom.xml
@@ -72,6 +72,10 @@
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-exec</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+    </dependency>
 
     <dependency>
       <groupId>org.testng</groupId>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java 
b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java
index 51344ce..ce3db42 100644
--- a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java
+++ b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java
@@ -69,11 +69,6 @@ public class TestMLResource extends LensJerseyTest {
   private LensMLClient mlClient;
 
   @Override
-  protected int getTestPort() {
-    return 10002;
-  }
-
-  @Override
   protected Application configure() {
     return new MLApp(SessionResource.class, QueryServiceResource.class);
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java 
b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
index 3493709..ff47353 100644
--- a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
+++ b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
@@ -54,11 +54,6 @@ public class TestMLRunner extends LensJerseyTest {
   private LensMLClient mlClient;
 
   @Override
-  protected int getTestPort() {
-    return 10058;
-  }
-
-  @Override
   protected Application configure() {
     return new MLApp(SessionResource.class, QueryServiceResource.class, 
MetastoreResource.class);
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-query-lib/pom.xml
----------------------------------------------------------------------
diff --git a/lens-query-lib/pom.xml b/lens-query-lib/pom.xml
index 29dbbe1..5d8ca3e 100644
--- a/lens-query-lib/pom.xml
+++ b/lens-query-lib/pom.xml
@@ -52,6 +52,10 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
       <artifactId>hive-serde</artifactId>
     </dependency>
     <dependency>
@@ -70,5 +74,9 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-client</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+    </dependency>
   </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-query-lib/src/main/java/org/apache/lens/lib/query/FilePersistentFormatter.java
----------------------------------------------------------------------
diff --git 
a/lens-query-lib/src/main/java/org/apache/lens/lib/query/FilePersistentFormatter.java
 
b/lens-query-lib/src/main/java/org/apache/lens/lib/query/FilePersistentFormatter.java
index b3e640f..5a6c39c 100644
--- 
a/lens-query-lib/src/main/java/org/apache/lens/lib/query/FilePersistentFormatter.java
+++ 
b/lens-query-lib/src/main/java/org/apache/lens/lib/query/FilePersistentFormatter.java
@@ -99,16 +99,13 @@ public class FilePersistentFormatter extends 
WrappedFileFormatter implements Per
    * @see 
org.apache.lens.server.api.query.PersistedOutputFormatter#addRowsFromPersistedPath(org.apache.hadoop.fs.Path)
    */
   @Override
-  public void addRowsFromPersistedPath(Path persistedDir) throws IOException {
-    FileSystem persistFs = persistedDir.getFileSystem(ctx.getConf());
+  public void addRowsFromPersistedPath(final Path persistedDir) throws 
IOException {
+    final FileSystem persistFs = persistedDir.getFileSystem(ctx.getConf());
 
     FileStatus[] partFiles = persistFs.listStatus(persistedDir, new 
PathFilter() {
       @Override
       public boolean accept(Path path) {
-        if (path.getName().startsWith("_")) {
-          return false;
-        }
-        return true;
+        return !path.getName().startsWith("_") && 
!path.getName().startsWith(".");
       }
     });
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-query-lib/src/test/java/org/apache/lens/lib/query/MockLensResultSetMetadata.java
----------------------------------------------------------------------
diff --git 
a/lens-query-lib/src/test/java/org/apache/lens/lib/query/MockLensResultSetMetadata.java
 
b/lens-query-lib/src/test/java/org/apache/lens/lib/query/MockLensResultSetMetadata.java
index 3b6c0e4..f7b60d5 100644
--- 
a/lens-query-lib/src/test/java/org/apache/lens/lib/query/MockLensResultSetMetadata.java
+++ 
b/lens-query-lib/src/test/java/org/apache/lens/lib/query/MockLensResultSetMetadata.java
@@ -18,14 +18,15 @@
  */
 package org.apache.lens.lib.query;
 
-import java.util.ArrayList;
+
 import java.util.List;
 
 import org.apache.lens.server.api.driver.LensResultSetMetadata;
 
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hive.service.cli.ColumnDescriptor;
+import org.apache.hive.service.cli.TypeDescriptor;
 
+import com.google.common.collect.Lists;
 import lombok.AllArgsConstructor;
 import lombok.NoArgsConstructor;
 
@@ -40,26 +41,26 @@ public class MockLensResultSetMetadata extends 
LensResultSetMetadata {
   }
 
   public static LensResultSetMetadata createMockedResultSet() {
-    List<ColumnDescriptor> columns = new ArrayList<ColumnDescriptor>();
-    columns.add(new ColumnDescriptor(new FieldSchema("firstcol", "int", ""), 
0));
-    columns.add(new ColumnDescriptor(new FieldSchema("format(secondcol,2)", 
"string", ""), 1));
-    columns.add(new ColumnDescriptor(new FieldSchema("thirdcol", 
"varchar(20)", ""), 2));
-    columns.add(new ColumnDescriptor(new FieldSchema("fourthcol", "char(15)", 
""), 3));
-    columns.add(new ColumnDescriptor(new FieldSchema("fifthcol", 
"array<tinyint>", ""), 4));
-    columns.add(new ColumnDescriptor(new FieldSchema("sixthcol", 
"struct<a:int,b:varchar(10)>", ""), 5));
-    columns.add(new ColumnDescriptor(new FieldSchema("seventhcol", 
"map<int,char(10)>", ""), 6));
-    return new MockLensResultSetMetadata(columns);
+    return new MockLensResultSetMetadata(Lists.newArrayList(
+      new ColumnDescriptor("firstcol", "", new TypeDescriptor("int"), 0),
+      new ColumnDescriptor("format(secondcol,2)", "", new 
TypeDescriptor("string"), 1),
+      new ColumnDescriptor("thirdcol", "", new TypeDescriptor("varchar(20)"), 
2),
+      new ColumnDescriptor("fourthcol", "", new TypeDescriptor("char(15)"), 3),
+      new ColumnDescriptor("fifthcol", "", new 
TypeDescriptor("array<tinyint>"), 4),
+      new ColumnDescriptor("sixthcol", "", new 
TypeDescriptor("struct<a:int,b:varchar(10)>"), 5),
+      new ColumnDescriptor("seventhcol", "", new 
TypeDescriptor("map<int,char(10)>"), 5)
+    ));
   }
 
   public static LensResultSetMetadata createMockedResultSetWithoutComma() {
-    List<ColumnDescriptor> columns = new ArrayList<ColumnDescriptor>();
-    columns.add(new ColumnDescriptor(new FieldSchema("firstcol", "int", ""), 
0));
-    columns.add(new ColumnDescriptor(new FieldSchema("secondcol", "string", 
""), 1));
-    columns.add(new ColumnDescriptor(new FieldSchema("thirdcol", 
"varchar(20)", ""), 2));
-    columns.add(new ColumnDescriptor(new FieldSchema("fourthcol", "char(15)", 
""), 3));
-    columns.add(new ColumnDescriptor(new FieldSchema("fifthcol", 
"array<tinyint>", ""), 4));
-    columns.add(new ColumnDescriptor(new FieldSchema("sixthcol", 
"struct<a:int,b:varchar(10)>", ""), 5));
-    columns.add(new ColumnDescriptor(new FieldSchema("seventhcol", 
"map<int,char(10)>", ""), 6));
-    return new MockLensResultSetMetadata(columns);
+    return new MockLensResultSetMetadata(Lists.newArrayList(
+      new ColumnDescriptor("firstcol", "", new TypeDescriptor("int"), 0),
+      new ColumnDescriptor("secondcol", "", new TypeDescriptor("string"), 1),
+      new ColumnDescriptor("thirdcol", "", new TypeDescriptor("varchar(20)"), 
2),
+      new ColumnDescriptor("fourthcol", "", new TypeDescriptor("char(15)"), 3),
+      new ColumnDescriptor("fifthcol", "", new 
TypeDescriptor("array<tinyint>"), 4),
+      new ColumnDescriptor("sixthcol", "", new 
TypeDescriptor("struct<a:int,b:varchar(10)>"), 5),
+      new ColumnDescriptor("seventhcol", "", new 
TypeDescriptor("map<int,char(10)>"), 6)
+    ));
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server-api/pom.xml
----------------------------------------------------------------------
diff --git a/lens-server-api/pom.xml b/lens-server-api/pom.xml
index 8304279..5508fb9 100644
--- a/lens-server-api/pom.xml
+++ b/lens-server-api/pom.xml
@@ -51,6 +51,18 @@
       <artifactId>hive-service</artifactId>
     </dependency>
     <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+    </dependency>
+    <dependency>
       <groupId>org.testng</groupId>
       <artifactId>testng</artifactId>
     </dependency>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensResultSetMetadata.java
----------------------------------------------------------------------
diff --git 
a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensResultSetMetadata.java
 
b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensResultSetMetadata.java
index da90e0a..2c4b86c 100644
--- 
a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensResultSetMetadata.java
+++ 
b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensResultSetMetadata.java
@@ -25,6 +25,7 @@ import java.util.List;
 import org.apache.lens.api.query.QueryResultSetMetadata;
 import org.apache.lens.api.query.ResultColumn;
 
+import org.apache.hadoop.hive.serde2.thrift.Type;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
@@ -68,7 +69,8 @@ public abstract class LensResultSetMetadata {
         throws IOException {
         ObjectCodec oc = jsonParser.getCodec();
         JsonNode node = oc.readTree(jsonParser);
-        org.apache.hive.service.cli.Type t = 
org.apache.hive.service.cli.Type.getType(node.get("type").asText());
+
+        Type t = Type.getType(node.get("type").asText());
         return new ColumnDescriptor(node.get("name").asText(), 
node.get("comment").asText(), new TypeDescriptor(t),
           node.get("position").asInt());
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server-api/src/main/java/org/apache/lens/server/api/query/save/param/ParameterDataTypeEncoder.java
----------------------------------------------------------------------
diff --git 
a/lens-server-api/src/main/java/org/apache/lens/server/api/query/save/param/ParameterDataTypeEncoder.java
 
b/lens-server-api/src/main/java/org/apache/lens/server/api/query/save/param/ParameterDataTypeEncoder.java
index 5295f68..3992511 100644
--- 
a/lens-server-api/src/main/java/org/apache/lens/server/api/query/save/param/ParameterDataTypeEncoder.java
+++ 
b/lens-server-api/src/main/java/org/apache/lens/server/api/query/save/param/ParameterDataTypeEncoder.java
@@ -84,7 +84,7 @@ public enum ParameterDataTypeEncoder {
     try {
       return 
ParameterDataTypeEncoder.valueOf(dataType.toString()).encode(rawValue);
     } catch (Throwable e) {
-      throw new ValueEncodeException(dataType, rawValue , e);
+      throw new ValueEncodeException(dataType, rawValue, e);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server-api/src/main/java/org/apache/lens/server/model/MappedDiagnosticLogSegregationContext.java
----------------------------------------------------------------------
diff --git 
a/lens-server-api/src/main/java/org/apache/lens/server/model/MappedDiagnosticLogSegregationContext.java
 
b/lens-server-api/src/main/java/org/apache/lens/server/model/MappedDiagnosticLogSegregationContext.java
index fd5f012..7780dff 100644
--- 
a/lens-server-api/src/main/java/org/apache/lens/server/model/MappedDiagnosticLogSegregationContext.java
+++ 
b/lens-server-api/src/main/java/org/apache/lens/server/model/MappedDiagnosticLogSegregationContext.java
@@ -50,4 +50,11 @@ public class MappedDiagnosticLogSegregationContext 
implements LogSegregationCont
   public void setQueryId(String id) {
     MDC.put(QUERY_LOG_ID, id);
   }
+
+  public static void put(String key, String value) {
+    MDC.put(key, value);
+  }
+  public static void remove(String key) {
+    MDC.remove(key);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/pom.xml
----------------------------------------------------------------------
diff --git a/lens-server/pom.xml b/lens-server/pom.xml
index 75b8b32..b73cd05 100644
--- a/lens-server/pom.xml
+++ b/lens-server/pom.xml
@@ -50,9 +50,21 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
       <artifactId>hive-service</artifactId>
     </dependency>
     <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-service-rpc</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+    </dependency>
+    <dependency>
       <groupId>javax.ws.rs</groupId>
       <artifactId>javax.ws.rs-api</artifactId>
     </dependency>
@@ -236,6 +248,22 @@
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>datanucleus-api-jdo</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>datanucleus-rdbms</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.derby</groupId>
+      <artifactId>derby</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.datanucleus</groupId>
+      <artifactId>javax.jdo</artifactId>
+    </dependency>
   </dependencies>
 
   <build>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/main/java/org/apache/lens/server/BaseLensService.java
----------------------------------------------------------------------
diff --git 
a/lens-server/src/main/java/org/apache/lens/server/BaseLensService.java 
b/lens-server/src/main/java/org/apache/lens/server/BaseLensService.java
index be31cd8..9f8ee72 100644
--- a/lens-server/src/main/java/org/apache/lens/server/BaseLensService.java
+++ b/lens-server/src/main/java/org/apache/lens/server/BaseLensService.java
@@ -18,11 +18,7 @@
  */
 package org.apache.lens.server;
 
-import java.io.Externalizable;
-import java.io.File;
-import java.io.IOException;
-import java.io.ObjectInput;
-import java.io.ObjectOutput;
+import java.io.*;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
@@ -47,7 +43,6 @@ import org.apache.lens.server.user.UserConfigLoaderFactory;
 import org.apache.lens.server.util.UtilityMethods;
 
 import org.apache.commons.lang3.StringUtils;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hive.service.CompositeService;
@@ -59,7 +54,7 @@ import org.apache.hive.service.cli.HandleIdentifier;
 import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.SessionHandle;
 import org.apache.hive.service.cli.session.SessionManager;
-import org.apache.hive.service.cli.thrift.TSessionHandle;
+import org.apache.hive.service.rpc.thrift.TSessionHandle;
 
 import lombok.extern.slf4j.Slf4j;
 
@@ -224,7 +219,7 @@ public abstract class BaseLensService extends 
CompositeService implements Extern
   }
 
   protected LensEventService getEventService() {
-    LensEventService  eventService = 
LensServices.get().getService(LensEventService.NAME);
+    LensEventService eventService = 
LensServices.get().getService(LensEventService.NAME);
     if (eventService == null) {
       throw new NullPointerException("Could not get event service");
     }
@@ -247,10 +242,10 @@ public abstract class BaseLensService extends 
CompositeService implements Extern
     HandleIdentifier handleIdentifier = new 
HandleIdentifier(sessionHandle.getPublicId(), sessionHandle.getSecretId());
     SessionHandle hiveSessionHandle = new SessionHandle(new 
TSessionHandle(handleIdentifier.toTHandleIdentifier()));
     try {
-      SessionHandle restoredHandle = 
cliService.restoreSession(hiveSessionHandle, userName, password,
+      cliService.createSessionWithSessionHandle(hiveSessionHandle, userName, 
password,
         new HashMap<String, String>());
-      LensSessionHandle restoredSession = new 
LensSessionHandle(restoredHandle.getHandleIdentifier().getPublicId(),
-        restoredHandle.getHandleIdentifier().getSecretId());
+      LensSessionHandle restoredSession = new 
LensSessionHandle(hiveSessionHandle.getHandleIdentifier().getPublicId(),
+        hiveSessionHandle.getHandleIdentifier().getSecretId());
       SESSION_MAP.put(restoredSession.getPublicId().toString(), 
restoredSession);
       updateSessionsPerUser(userName);
     } catch (HiveSQLException e) {
@@ -271,14 +266,14 @@ public abstract class BaseLensService extends 
CompositeService implements Extern
         cliService.getHiveConf().setVar(var, 
cliService.getHiveConf().get(LensConfConstants.SERVER_DOMAIN));
       }
     }
-    String authType = 
cliService.getHiveConf().getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION);
+    String authType = 
getHiveConf().getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION);
     // No-op when authType is NOSASL
     if 
(!authType.equalsIgnoreCase(HiveAuthFactory.AuthTypes.NOSASL.toString())) {
       try {
         AuthenticationProviderFactory.AuthMethods authMethod = 
AuthenticationProviderFactory.AuthMethods
           .getValidAuthMethod(authType);
-        PasswdAuthenticationProvider provider = 
AuthenticationProviderFactory.getAuthenticationProvider(authMethod,
-          cliService.getHiveConf());
+        PasswdAuthenticationProvider provider = AuthenticationProviderFactory
+          .getAuthenticationProvider(authMethod, getHiveConf());
         provider.Authenticate(userName, password);
       } catch (Exception e) {
         log.error("Auth error: ", e);
@@ -334,7 +329,7 @@ public abstract class BaseLensService extends 
CompositeService implements Extern
    */
   public LensSessionImpl getSession(LensSessionHandle sessionHandle) {
     if (sessionHandle == null) {
-      throw new ClientErrorException("Session is null " + sessionHandle, 400);
+      throw new ClientErrorException("Session is null", 400);
     }
 
     try {

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/main/java/org/apache/lens/server/LensServices.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/LensServices.java 
b/lens-server/src/main/java/org/apache/lens/server/LensServices.java
index a2d037a..46bae24 100644
--- a/lens-server/src/main/java/org/apache/lens/server/LensServices.java
+++ b/lens-server/src/main/java/org/apache/lens/server/LensServices.java
@@ -187,7 +187,7 @@ public class LensServices extends CompositeService 
implements ServiceProvider {
       conf.setVar(HiveConf.ConfVars.HIVE_SESSION_IMPL_CLASSNAME, 
LensSessionImpl.class.getCanonicalName());
       serviceMode = conf.getEnum(SERVER_MODE,
         SERVICE_MODE.valueOf(DEFAULT_SERVER_MODE));
-      cliService = new CLIService();
+      cliService = new CLIService(null);
       UserConfigLoaderFactory.init(conf);
       // Add default services
       addService(cliService);

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
----------------------------------------------------------------------
diff --git 
a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
 
b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
index 0127ef8..c503f16 100644
--- 
a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
+++ 
b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
@@ -21,6 +21,7 @@ package org.apache.lens.server.metastore;
 import static org.apache.lens.server.metastore.JAXBUtils.*;
 
 import java.util.*;
+import java.util.Date;
 
 import javax.ws.rs.BadRequestException;
 import javax.ws.rs.NotFoundException;
@@ -1326,9 +1327,8 @@ public class CubeMetastoreServiceImpl extends 
BaseLensService implements CubeMet
         tables = getNativeTablesFromDB(sessionid, dbName, false);
       } else {
         log.info("Getting tables from all dbs");
-        List<String> alldbs = getAllDatabases(sessionid);
-        tables = new ArrayList<String>();
-        for (String db : alldbs) {
+        tables = new ArrayList<>();
+        for (String db : getAllDatabases(sessionid)) {
           tables.addAll(getNativeTablesFromDB(sessionid, db, true));
         }
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
----------------------------------------------------------------------
diff --git 
a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java 
b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
index 79a628f..1e11325 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
@@ -36,7 +36,6 @@ import org.apache.lens.server.api.error.LensException;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -953,9 +952,6 @@ public final class JAXBUtils {
       Class<? extends HiveOutputFormat> outputFormatClass =
         Class.forName(xp.getOutputFormat()).asSubclass(HiveOutputFormat.class);
       partition.setOutputFormatClass(outputFormatClass);
-      // Again a hack, for the issue described in HIVE-11278
-      partition.getTPartition().getSd().setOutputFormat(
-        HiveFileFormatUtils.getOutputFormatSubstitute(outputFormatClass, 
false).getName());
     }
     partition.getParameters().put(MetastoreConstants.PARTITION_UPDATE_PERIOD, 
xp.getUpdatePeriod().name());
     
partition.getTPartition().getSd().getSerdeInfo().setSerializationLib(xp.getSerdeClassname());

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git 
a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
 
b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index f5552dc..b12943e 100644
--- 
a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ 
b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -895,6 +895,7 @@ public class QueryExecutionServiceImpl extends 
BaseLensService implements QueryE
   private void updateStatus(final QueryHandle handle) throws LensException {
     QueryContext ctx = allQueries.get(handle);
     if (ctx != null) {
+      logSegregationContext.setLogSegragationAndQueryId(ctx.getLogHandle());
       synchronized (ctx) {
         QueryStatus before = ctx.getStatus();
         if (!ctx.queued() && !ctx.finished() && 
!ctx.getDriverStatus().isFinished()) {

Reply via email to