HIVE-13424: Refactoring the code to pass a QueryState object rather than HiveConf object (Reviewed by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/caa3ec76
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/caa3ec76
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/caa3ec76

Branch: refs/heads/master
Commit: caa3ec761c18d822259116fde9ff8a4f181df179
Parents: 86bdcbc
Author: Aihua Xu <aihu...@apache.org>
Authored: Tue Mar 15 13:12:57 2016 -0400
Committer: Aihua Xu <aihu...@apache.org>
Committed: Mon Apr 25 16:30:09 2016 -0400

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/cli/CliDriver.java   |   5 -
 .../mapreduce/TestHCatMultiOutputFormat.java    |   7 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java    |   9 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  93 ++++-----------
 .../org/apache/hadoop/hive/ql/QueryState.java   | 114 +++++++++++++++++++
 .../hadoop/hive/ql/exec/ColumnStatsTask.java    |   5 +-
 .../hive/ql/exec/ColumnStatsUpdateTask.java     |   6 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |   9 +-
 .../apache/hadoop/hive/ql/exec/ExplainTask.java |   4 +-
 .../apache/hadoop/hive/ql/exec/FetchTask.java   |   5 +-
 .../hadoop/hive/ql/exec/FunctionTask.java       |   6 +-
 .../hadoop/hive/ql/exec/StatsNoJobTask.java     |   5 +-
 .../org/apache/hadoop/hive/ql/exec/Task.java    |   6 +-
 .../hadoop/hive/ql/exec/mr/ExecDriver.java      |  11 +-
 .../hive/ql/exec/mr/HadoopJobExecHelper.java    |  21 ++--
 .../hadoop/hive/ql/exec/mr/MapredLocalTask.java |   7 +-
 .../hadoop/hive/ql/exec/spark/SparkTask.java    |   7 +-
 .../hadoop/hive/ql/history/HiveHistoryImpl.java |   6 +-
 .../apache/hadoop/hive/ql/hooks/ATSHook.java    |   4 +-
 .../hadoop/hive/ql/hooks/HookContext.java       |  11 +-
 .../hive/ql/hooks/PostExecutePrinter.java       |  12 +-
 .../hadoop/hive/ql/hooks/PreExecutePrinter.java |  14 ++-
 .../hive/ql/index/TableBasedIndexHandler.java   |   2 -
 .../hadoop/hive/ql/io/merge/MergeFileTask.java  |   7 +-
 .../ql/io/rcfile/stats/PartialScanTask.java     |  12 +-
 .../io/rcfile/truncate/ColumnTruncateTask.java  |   7 +-
 .../hive/ql/optimizer/GenMRTableScan1.java      |   2 +-
 .../index/RewriteParseContextGenerator.java     |   7 +-
 .../RewriteQueryUsingAggregateIndexCtx.java     |   2 +-
 .../hive/ql/parse/BaseSemanticAnalyzer.java     |  14 ++-
 .../hadoop/hive/ql/parse/CalcitePlanner.java    |  11 +-
 .../ql/parse/ColumnStatsSemanticAnalyzer.java   |   7 +-
 .../hive/ql/parse/DDLSemanticAnalyzer.java      |  13 ++-
 .../parse/ExplainSQRewriteSemanticAnalyzer.java |   8 +-
 .../hive/ql/parse/ExplainSemanticAnalyzer.java  |   7 +-
 .../hive/ql/parse/ExportSemanticAnalyzer.java   |   5 +-
 .../hive/ql/parse/FunctionSemanticAnalyzer.java |   5 +-
 .../hive/ql/parse/ImportSemanticAnalyzer.java   |   5 +-
 .../hive/ql/parse/LoadSemanticAnalyzer.java     |   5 +-
 .../hive/ql/parse/MacroSemanticAnalyzer.java    |  12 +-
 .../hadoop/hive/ql/parse/ParseContext.java      |  14 ++-
 .../hive/ql/parse/ProcessAnalyzeTable.java      |   2 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  35 +++---
 .../hive/ql/parse/SemanticAnalyzerFactory.java  |  57 +++++-----
 .../hadoop/hive/ql/parse/TaskCompiler.java      |   9 +-
 .../hadoop/hive/ql/parse/TezCompiler.java       |   5 +-
 .../ql/parse/UpdateDeleteSemanticAnalyzer.java  |   5 +-
 .../parse/spark/SparkProcessAnalyzeTable.java   |   2 +-
 .../hadoop/hive/ql/session/SessionState.java    | 106 ++++++-----------
 .../hadoop/hive/ql/exec/TestExecDriver.java     |   7 +-
 .../ql/parse/TestMacroSemanticAnalyzer.java     |   7 +-
 .../hadoop/hive/ql/parse/TestQBCompact.java     |   8 +-
 .../ql/parse/TestQBJoinTreeApplyPredicate.java  |   7 +-
 .../hadoop/hive/ql/parse/TestQBSubQuery.java    |   7 +-
 .../ql/parse/TestSemanticAnalyzerFactory.java   |   9 +-
 .../parse/TestUpdateDeleteSemanticAnalyzer.java |   9 +-
 .../authorization/AuthorizationTestUtil.java    |  14 +--
 .../parse/authorization/PrivilegesTestBase.java |   5 +-
 .../TestHiveAuthorizationTaskFactory.java       |  12 +-
 .../parse/authorization/TestPrivilegesV1.java   |   9 +-
 .../parse/authorization/TestPrivilegesV2.java   |   8 +-
 .../hive/service/cli/operation/Operation.java   |  19 ++--
 .../service/cli/operation/SQLOperation.java     |  49 ++------
 .../cli/operation/SQLOperationDisplay.java      |   2 +-
 .../service/cli/session/HiveSessionImpl.java    |  40 +++----
 65 files changed, 508 insertions(+), 437 deletions(-)
----------------------------------------------------------------------


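The thrust of the change: state that was read from a shared HiveConf (and from the
thread-local SessionState) now travels in a per-query QueryState object. A minimal
sketch of the before/after calling pattern, assuming the classes shown in the diff
below; tree, plan, driverContext, and opContext stand in for values produced during
compilation:

    // Before: a raw HiveConf was threaded through compilation and execution.
    HiveConf conf = new HiveConf(Driver.class);
    Driver legacyDriver = new Driver(conf);   // constructor kept for test compatibility

    // After: QueryState wraps the (possibly cloned) conf plus per-query info,
    // such as the generated query id and the HiveOperation command type.
    QueryState queryState = new QueryState(conf);
    Driver driver = new Driver(queryState, null /* userName */);
    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree);
    tsk.initialize(queryState, plan, driverContext, opContext);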
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
----------------------------------------------------------------------
diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index 2bcb56d..f467c81 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -121,8 +121,6 @@ public class CliDriver {
 
     ss.updateThreadName();
 
-    conf.set(HiveConf.ConfVars.HIVEQUERYID.varname, QueryPlan.makeQueryId());
-
     // Flush the print stream, so it doesn't include output from the last command
     ss.err.flush();
     String cmd_trimmed = cmd.trim();
@@ -401,9 +399,6 @@ public class CliDriver {
         }
 
         ret = processCmd(command);
-        //wipe cli query state
-        SessionState ss = SessionState.get();
-        ss.setCommandType(null);
         command = "";
         lastRet = ret;
        boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIIGNOREERRORS);

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
index 1964410..9fa263d 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
@@ -26,6 +26,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Random;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -40,6 +41,7 @@ import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -369,7 +371,8 @@ public class TestHCatMultiOutputFormat {
    * @throws Exception if any error occurs
    */
  private List<String> getTableData(String table, String database) throws Exception {
-    HiveConf conf = new HiveConf();
+    QueryState queryState = new QueryState(null);
+    HiveConf conf = queryState.getConf();
     conf.addResource("hive-site.xml");
     ArrayList<String> results = new ArrayList<String>();
     ArrayList<String> temp = new ArrayList<String>();
@@ -392,7 +395,7 @@ public class TestHCatMultiOutputFormat {
     }
     FetchTask task = new FetchTask();
     task.setWork(work);
-    task.initialize(conf, null, null, new CompilationOpContext());
+    task.initialize(queryState, null, null, new CompilationOpContext());
     task.fetch(temp);
     for (String str : temp) {
       results.add(str.replace("\t", ","));

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 8473436..a6e8efa 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -163,6 +163,7 @@ public class QTestUtil {
   private static MiniClusterType clusterType = MiniClusterType.none;
   private ParseDriver pd;
   protected Hive db;
+  protected QueryState queryState;
   protected HiveConf conf;
   private Driver drv;
   private BaseSemanticAnalyzer sem;
@@ -389,10 +390,12 @@ public class QTestUtil {
      HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml"));
       System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation());
     }
+
+    queryState = new QueryState(new HiveConf(Driver.class));
     if (useHBaseMetastore) {
       startMiniHBaseCluster();
     } else {
-      conf = new HiveConf(Driver.class);
+      conf = queryState.getConf();
     }
     this.hadoopVer = getHadoopMainVersion(hadoopVer);
     qMap = new TreeMap<String, String>();
@@ -922,7 +925,7 @@ public class QTestUtil {
     drv = new Driver(conf);
     drv.init();
     pd = new ParseDriver();
-    sem = new SemanticAnalyzer(conf);
+    sem = new SemanticAnalyzer(queryState);
   }
 
   public void init(String tname) throws Exception {
@@ -1648,7 +1651,7 @@ public class QTestUtil {
   public void resetParser() throws SemanticException {
     drv.init();
     pd = new ParseDriver();
-    sem = new SemanticAnalyzer(conf);
+    sem = new SemanticAnalyzer(queryState);
   }
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 2a06962..7f72efb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -165,6 +165,9 @@ public class Driver implements CommandProcessor {
   // For WebUI.  Kept alive after queryPlan is freed.
   private final QueryDisplay queryDisplay = new QueryDisplay();
 
+  // Query specific info
+  private QueryState queryState;
+
   private boolean checkConcurrency() {
    boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
     if (!supportConcurrency) {
@@ -290,22 +293,28 @@ public class Driver implements CommandProcessor {
     this.maxRows = maxRows;
   }
 
+  public Driver() {
+    this(new QueryState((SessionState.get() != null) ?
+        SessionState.get().getConf() : new HiveConf()), null);
+  }
+
   /**
    * for backwards compatibility with current tests
    */
   public Driver(HiveConf conf) {
-    this.conf = conf;
-    isParallelEnabled = (conf != null)
-        && HiveConf.getBoolVar(conf, ConfVars.HIVE_SERVER2_PARALLEL_COMPILATION);
+    this(new QueryState(conf), null);
   }
 
   public Driver(HiveConf conf, String userName) {
-    this(conf);
-    this.userName = userName;
+    this(new QueryState(conf), userName);
   }
 
-  public Driver() {
-    this((SessionState.get() != null) ? SessionState.get().getConf() : null);
+  public Driver(QueryState queryState, String userName) {
+    this.queryState = queryState;
+    this.conf = queryState.getConf();
+    isParallelEnabled = (conf != null)
+        && HiveConf.getBoolVar(conf, ConfVars.HIVE_SERVER2_PARALLEL_COMPILATION);
+    this.userName = userName;
   }
 
   /**
@@ -320,52 +329,6 @@ public class Driver implements CommandProcessor {
   }
 
   /**
-   * Hold state variables specific to each query being executed, that may not
-   * be consistent in the overall SessionState
-   */
-  private static class QueryState {
-    private HiveOperation op;
-    private String cmd;
-    private boolean init = false;
-
-    /**
-     * Initialize the queryState with the query state variables
-     */
-    public void init(HiveOperation op, String cmd) {
-      this.op = op;
-      this.cmd = cmd;
-      this.init = true;
-    }
-
-    public boolean isInitialized() {
-      return this.init;
-    }
-
-    public HiveOperation getOp() {
-      return this.op;
-    }
-
-    public String getCmd() {
-      return this.cmd;
-    }
-  }
-
-  public void saveSession(QueryState qs) {
-    SessionState oldss = SessionState.get();
-    if (oldss != null && oldss.getHiveOperation() != null) {
-      qs.init(oldss.getHiveOperation(), oldss.getCmd());
-    }
-  }
-
-  public void restoreSession(QueryState qs) {
-    SessionState ss = SessionState.get();
-    if (ss != null && qs != null && qs.isInitialized()) {
-      ss.setCmd(qs.getCmd());
-      ss.setCommandType(qs.getOp());
-    }
-  }
-
-  /**
    * Compile a new query, but potentially reset taskID counter.  Not resetting task counter
    * is useful for generating re-entrant QL queries.
    * @param command  The HiveQL query to compile
@@ -392,9 +355,6 @@ public class Driver implements CommandProcessor {
       LOG.warn("WARNING! Query command could not be redacted." + e);
     }
 
-    //holder for parent command type/string when executing reentrant queries
-    QueryState queryState = new QueryState();
-
     if (ctx != null) {
       close();
     }
@@ -402,7 +362,6 @@ public class Driver implements CommandProcessor {
     if (resetTaskIds) {
       TaskFactory.resetId();
     }
-    saveSession(queryState);
 
     String queryId = conf.getVar(HiveConf.ConfVars.HIVEQUERYID);
 
@@ -447,7 +406,7 @@ public class Driver implements CommandProcessor {
 
 
       perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.ANALYZE);
-      BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
+      BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree);
       List<HiveSemanticAnalyzerHook> saHooks =
           getHooks(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK,
               HiveSemanticAnalyzerHook.class);
@@ -491,7 +450,7 @@ public class Driver implements CommandProcessor {
       schema = getSchema(sem, conf);
 
      plan = new QueryPlan(queryStr, sem, perfLogger.getStartTime(PerfLogger.DRIVER_RUN), queryId,
-        SessionState.get().getHiveOperation(), schema, queryDisplay);
+        queryState.getHiveOperation(), schema, queryDisplay);
 
       conf.setQueryString(queryStr);
 
@@ -500,7 +459,7 @@ public class Driver implements CommandProcessor {
 
       // initialize FetchTask right here
       if (plan.getFetchTask() != null) {
-        plan.getFetchTask().initialize(conf, plan, null, ctx.getOpContext());
+        plan.getFetchTask().initialize(queryState, plan, null, ctx.getOpContext());
       }
 
       //do the authorization check
@@ -509,7 +468,7 @@ public class Driver implements CommandProcessor {
 
         try {
           perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
-          doAuthorization(sem, command);
+          doAuthorization(queryState.getHiveOperation(), sem, command);
         } catch (AuthorizationException authExp) {
           console.printError("Authorization failed:" + authExp.getMessage()
               + ". Use SHOW GRANT to get more details.");
@@ -562,7 +521,6 @@ public class Driver implements CommandProcessor {
      double duration = perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.COMPILE)/1000.00;
      ImmutableMap<String, Long> compileHMSTimings = dumpMetaCallTimingWithoutEx("compilation");
      queryDisplay.setHmsTimings(QueryDisplay.Phase.COMPILATION, compileHMSTimings);
-      restoreSession(queryState);
       LOG.info("Completed compiling command(queryId=" + queryId + "); Time 
taken: " + duration + " seconds");
     }
   }
@@ -589,7 +547,7 @@ public class Driver implements CommandProcessor {
       ASTNode astTree) throws IOException {
     String ret = null;
     ExplainTask task = new ExplainTask();
-    task.initialize(conf, plan, null, ctx.getOpContext());
+    task.initialize(queryState, plan, null, ctx.getOpContext());
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     PrintStream ps = new PrintStream(baos);
     try {
@@ -612,10 +570,9 @@ public class Driver implements CommandProcessor {
    * @throws HiveException
    * @throws AuthorizationException
    */
-  public static void doAuthorization(BaseSemanticAnalyzer sem, String command)
+  public static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, String command)
       throws HiveException, AuthorizationException {
     SessionState ss = SessionState.get();
-    HiveOperation op = ss.getHiveOperation();
     Hive db = sem.getDb();
 
     Set<ReadEntity> additionalInputs = new HashSet<ReadEntity>();
@@ -1530,7 +1487,7 @@ public class Driver implements CommandProcessor {
       resStream = null;
 
       SessionState ss = SessionState.get();
-      hookContext = new HookContext(plan, conf, ctx.getPathToCS(), ss.getUserName(),
+      hookContext = new HookContext(plan, queryState, ctx.getPathToCS(), ss.getUserName(),
           ss.getUserIpAddress(), operationId);
       hookContext.setHookType(HookContext.HookType.PRE_EXEC_HOOK);
 
@@ -1857,7 +1814,7 @@ public class Driver implements CommandProcessor {
       cxt.incCurJobNo(1);
       console.printInfo("Launching Job " + cxt.getCurJobNo() + " out of " + 
jobs);
     }
-    tsk.initialize(conf, plan, cxt, ctx.getOpContext());
+    tsk.initialize(queryState, plan, cxt, ctx.getOpContext());
     TaskResult tskRes = new TaskResult();
     TaskRunner tskRun = new TaskRunner(tsk, tskRes);
 
@@ -1958,7 +1915,7 @@ public class Driver implements CommandProcessor {
         throw new IOException("Error closing the current fetch task", e);
       }
       // FetchTask should not depend on the plan.
-      fetchTask.initialize(conf, null, null, ctx.getOpContext());
+      fetchTask.initialize(queryState, null, null, ctx.getOpContext());
     } else {
       ctx.resetStream();
       resStream = null;

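A side effect of the Driver changes above: doAuthorization no longer reads the
operation from the thread-local SessionState, so callers now pass it explicitly. A
hedged sketch of the call-site shift (sem, command, and queryState are illustrative
local variables):

    // Before: the operation was pulled from SessionState.get().getHiveOperation().
    Driver.doAuthorization(sem, command);

    // After: the operation travels with the per-query state.
    Driver.doAuthorization(queryState.getHiveOperation(), sem, command);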
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
new file mode 100644
index 0000000..78715d8
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql;
+
+import java.sql.Timestamp;
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+
+/**
+ * The class to store query level info such as queryId. Multiple queries can run
+ * in the same session, so SessionState is to hold common session related info, and
+ * each QueryState is to hold query related info.
+ *
+ */
+public class QueryState {
+  /**
+   * current configuration.
+   */
+  private final HiveConf queryConf;
+  /**
+   * type of the command.
+   */
+  private HiveOperation commandType;
+
+  public QueryState(HiveConf conf) {
+    this(conf, null, false);
+  }
+
+  public QueryState(HiveConf conf, Map<String, String> confOverlay, boolean runAsync) {
+    this.queryConf = createConf(conf, confOverlay, runAsync);
+  }
+
+  /**
+   * If there are query specific settings to overlay, then create a copy of config
+   * There are two cases we need to clone the session config that's being passed to hive driver
+   * 1. Async query -
+   *    If the client changes a config setting, that shouldn't reflect in the execution already underway
+   * 2. confOverlay -
+   *    The query specific settings should only be applied to the query config and not session
+   * @return new configuration
+   */
+  private HiveConf createConf(HiveConf conf,
+      Map<String, String> confOverlay,
+      boolean runAsync) {
+
+    if ( (confOverlay != null && !confOverlay.isEmpty()) ) {
+      conf = (conf == null ? new HiveConf() : new HiveConf(conf));
+
+      // apply overlay query specific settings, if any
+      for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) {
+        try {
+          conf.verifyAndSet(confEntry.getKey(), confEntry.getValue());
+        } catch (IllegalArgumentException e) {
+          throw new RuntimeException("Error applying statement specific settings", e);
+        }
+      }
+    } else if (runAsync) {
+      conf = (conf == null ? new HiveConf() : new HiveConf(conf));
+    }
+
+    if (conf == null) {
+      conf = new HiveConf();
+    }
+
+    conf.setVar(HiveConf.ConfVars.HIVEQUERYID, QueryPlan.makeQueryId());
+    return conf;
+  }
+
+  public String getQueryId() {
+    return (queryConf.getVar(HiveConf.ConfVars.HIVEQUERYID));
+  }
+
+  public String getQueryString() {
+    return queryConf.getQueryString();
+  }
+
+  public String getCommandType() {
+    if (commandType == null) {
+      return null;
+    }
+    return commandType.getOperationName();
+  }
+
+  public HiveOperation getHiveOperation() {
+    return commandType;
+  }
+
+  public void setCommandType(HiveOperation commandType) {
+    this.commandType = commandType;
+  }
+
+  public HiveConf getConf() {
+    return queryConf;
+  }
+}

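Construction semantics of the new class, as written above: the incoming conf is
cloned only when a statement-level overlay is supplied or the query runs
asynchronously, and a fresh query id is always stamped into the resulting conf. A
short usage sketch (sessionConf and the overlay key are illustrative):

    // Synchronous query without overrides: the caller's conf instance is reused,
    // but HIVEQUERYID is set to a newly generated id.
    QueryState qs = new QueryState(sessionConf);
    String queryId = qs.getQueryId();

    // Async query with statement-scoped settings: the conf is cloned first, so
    // the overlay and later session-level changes cannot leak across queries.
    Map<String, String> overlay = new HashMap<String, String>();
    overlay.put("hive.exec.parallel", "true");   // illustrative override
    QueryState asyncState = new QueryState(sessionConf, overlay, true /* runAsync */);
    assert asyncState.getConf() != sessionConf;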
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
index 9059928..05dfa3b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -83,9 +84,9 @@ public class ColumnStatsTask extends Task<ColumnStatsWork> implements Serializable
   }
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, ctx, opContext);
+    super.initialize(queryState, queryPlan, ctx, opContext);
     work.initializeForFetch(opContext);
     try {
       JobConf job = new JobConf(conf);

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
index bca8a6c..9a6e5c9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
@@ -27,7 +27,6 @@ import java.util.Map.Entry;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
@@ -45,6 +44,7 @@ import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -68,9 +68,9 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
       .getLogger(ColumnStatsUpdateTask.class);
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, ctx, opContext);
+    super.initialize(queryState, queryPlan, ctx, opContext);
   }
 
   private ColumnStatistics constructColumnStatsFromInput()

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index b26f09d..d2c3ca8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -91,6 +91,7 @@ import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ArchiveUtils.PartSpecInfo;
 import org.apache.hadoop.hive.ql.exec.tez.TezTask;
 import org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer;
@@ -259,9 +260,9 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
   }
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, ctx, opContext);
+    super.initialize(queryState, queryPlan, ctx, opContext);
 
     // Pick the formatter to use to display the results.  Either the
     // normal human readable output or a json object.
@@ -663,7 +664,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     }
 
     // initialize the task and execute
-    task.initialize(db.getConf(), getQueryPlan(), driverCxt, opContext);
+    task.initialize(queryState, getQueryPlan(), driverCxt, opContext);
     int ret = task.execute(driverCxt);
     return ret;
   }
@@ -4160,7 +4161,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
       truncateWork.setMapperCannotSpanPartns(true);
       DriverContext driverCxt = new DriverContext();
       ColumnTruncateTask taskExec = new ColumnTruncateTask();
-      taskExec.initialize(db.getConf(), null, driverCxt, null);
+      taskExec.initialize(queryState, null, driverCxt, null);
       taskExec.setWork(truncateWork);
       taskExec.setQueryPlan(this.getQueryPlan());
       return taskExec.execute(driverCxt);

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index 4ce0864..403d57c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -312,7 +312,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
       throws Exception {
 
     BaseSemanticAnalyzer analyzer = work.getAnalyzer();
-    HiveOperation operation = SessionState.get().getHiveOperation();
+    HiveOperation operation = queryState.getHiveOperation();
 
     JSONObject object = new JSONObject(new LinkedHashMap<>());
    Object jsonInput = toJson("INPUTS", toString(analyzer.getInputs()), out, work);
@@ -349,7 +349,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
 
       SessionState.get().setActiveAuthorizer(authorizer);
       try {
-        Driver.doAuthorization(analyzer, "");
+        Driver.doAuthorization(queryState.getHiveOperation(), analyzer, "");
       } finally {
         SessionState.get().setActiveAuthorizer(delegate);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
index b96ea04..ec9e98e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat;
@@ -59,9 +60,9 @@ public class FetchTask extends Task<FetchWork> implements Serializable {
   }
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, ctx, opContext);
+    super.initialize(queryState, queryPlan, ctx, opContext);
     work.initializeForFetch(opContext);
 
     try {

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
index 1b971fc..42cdc84 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
@@ -25,6 +25,7 @@ import java.util.List;
 
 import com.google.common.collect.HashMultimap;
 import com.google.common.collect.Multimap;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
@@ -37,6 +38,7 @@ import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
@@ -62,9 +64,9 @@ public class FunctionTask extends Task<FunctionWork> {
   }
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, ctx, opContext);
+    super.initialize(queryState, queryPlan, ctx, opContext);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
index 3199ee1..d5ae019 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.io.StatsProvidingRecordReader;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -84,9 +85,9 @@ public class StatsNoJobTask extends Task<StatsNoJobWork> implements Serializable
   }
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, driverContext, opContext);
+    super.initialize(queryState, queryPlan, driverContext, opContext);
     jc = new JobConf(conf);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
index 6c677f5..897af5e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
@@ -51,6 +51,7 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
   public transient HashMap<String, Long> taskCounters;
   public transient TaskHandle taskHandle;
   protected transient HiveConf conf;
+  protected transient QueryState queryState;
   protected transient LogHelper console;
   protected transient QueryPlan queryPlan;
   protected transient DriverContext driverContext;
@@ -124,11 +125,12 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
     return taskHandle;
   }
 
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext,
       CompilationOpContext opContext) {
     this.queryPlan = queryPlan;
     setInitialized();
-    this.conf = conf;
+    this.queryState = queryState;
+    this.conf = queryState.getConf();
     this.driverContext = driverContext;
     console = new LogHelper(LOG);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index d164859..639b0da 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.FetchOperator;
 import org.apache.hadoop.hive.ql.exec.HiveTotalOrderPartitioner;
 import org.apache.hadoop.hive.ql.exec.Operator;
@@ -124,7 +125,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, HadoopJobExecHook
   public ExecDriver() {
     super();
     console = new LogHelper(LOG);
-    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
+    this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, this);
   }
 
   @Override
@@ -142,9 +143,9 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, HadoopJobExecHook
    * Initialization when invoked from QL.
    */
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, driverContext, opContext);
+    super.initialize(queryState, queryPlan, driverContext, opContext);
 
     job = new JobConf(conf, ExecDriver.class);
 
@@ -168,7 +169,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, HadoopJobExecHook
       HiveConf.setVar(job, ConfVars.HIVEADDEDARCHIVES, addedArchives);
     }
     conf.stripHiddenConfigurations(job);
-    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
+    this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, this);
   }
 
   /**
@@ -178,7 +179,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, HadoopJobExecHook
     setWork(plan);
     this.job = job;
     console = new LogHelper(LOG, isSilent);
-    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
+    this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, this);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
index 1b296b9..760ba6c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
@@ -33,6 +33,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.MapRedStats;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskHandle;
@@ -75,6 +76,7 @@ public class HadoopJobExecHelper {
   public transient JobID jobId;
   private final LogHelper console;
   private final HadoopJobExecHook callBackObj;
+  private final QueryState queryState;
 
   /**
    * Update counters relevant to this task.
@@ -135,8 +137,9 @@ public class HadoopJobExecHelper {
     this.jobId = jobId;
   }
 
-  public HadoopJobExecHelper(JobConf job, LogHelper console,
+  public HadoopJobExecHelper(QueryState queryState, JobConf job, LogHelper console,
       Task<? extends Serializable> task, HadoopJobExecHook hookCallBack) {
+    this.queryState = queryState;
     this.job = job;
     this.console = console;
     this.task = task;
@@ -250,14 +253,14 @@ public class HadoopJobExecHelper {
 
           String logMapper;
           String logReducer;
-
+          String queryId = queryState.getQueryId();
           TaskReport[] mappers = jc.getMapTaskReports(rj.getID());
           if (mappers == null) {
             logMapper = "no information for number of mappers; ";
           } else {
             numMap = mappers.length;
             if (ss != null) {
-              ss.getHiveHistory().setTaskProperty(SessionState.get().getQueryId(), getId(),
+              ss.getHiveHistory().setTaskProperty(queryId, getId(),
                 Keys.TASK_NUM_MAPPERS, Integer.toString(numMap));
             }
             logMapper = "number of mappers: " + numMap + "; ";
@@ -269,7 +272,7 @@ public class HadoopJobExecHelper {
           } else {
             numReduce = reducers.length;
             if (ss != null) {
-              ss.getHiveHistory().setTaskProperty(SessionState.get().getQueryId(), getId(),
+              ss.getHiveHistory().setTaskProperty(queryId, getId(),
                 Keys.TASK_NUM_REDUCERS, Integer.toString(numReduce));
             }
             logReducer = "number of reducers: " + numReduce;
@@ -355,11 +358,11 @@ public class HadoopJobExecHelper {
       String output = report.toString();
       SessionState ss = SessionState.get();
       if (ss != null) {
-        ss.getHiveHistory().setTaskCounters(SessionState.get().getQueryId(), getId(), ctrs);
-        ss.getHiveHistory().setTaskProperty(SessionState.get().getQueryId(), getId(),
+        ss.getHiveHistory().setTaskCounters(queryState.getQueryId(), getId(), ctrs);
+        ss.getHiveHistory().setTaskProperty(queryState.getQueryId(), getId(),
             Keys.TASK_HADOOP_PROGRESS, output);
        if (ss.getConf().getBoolVar(HiveConf.ConfVars.HIVE_LOG_INCREMENTAL_PLAN_PROGRESS)) {
-          ss.getHiveHistory().progressTask(SessionState.get().getQueryId(), this.task);
+          ss.getHiveHistory().progressTask(queryState.getQueryId(), this.task);
           this.callBackObj.logPlanProgress(ss);
         }
       }
@@ -386,7 +389,7 @@ public class HadoopJobExecHelper {
       } else {
         SessionState ss = SessionState.get();
         if (ss != null) {
-          ss.getHiveHistory().setTaskCounters(SessionState.get().getQueryId(), getId(), ctrs);
+          ss.getHiveHistory().setTaskCounters(queryState.getQueryId(), getId(), ctrs);
         }
         success = rj.isSuccessful();
       }
@@ -430,7 +433,7 @@ public class HadoopJobExecHelper {
       console.printInfo("Job running in-process (local Hadoop)");
     } else {
       if (SessionState.get() != null) {
-        SessionState.get().getHiveHistory().setTaskProperty(SessionState.get().getQueryId(),
+        SessionState.get().getHiveHistory().setTaskProperty(queryState.getQueryId(),
             getId(), Keys.TASK_HADOOP_ID, rj.getID().toString());
       }
       console.printInfo(getJobStartMsg(rj.getID()) + ", Tracking URL = "

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
index c81b14c..3c1f0de 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.BucketMatcher;
 import org.apache.hadoop.hive.ql.exec.FetchOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
@@ -121,13 +122,13 @@ public class MapredLocalTask extends Task<MapredLocalWork> implements Serializable
   }
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, driverContext, opContext);
+    super.initialize(queryState, queryPlan, driverContext, opContext);
     job = new JobConf(conf, ExecDriver.class);
     execContext = new ExecMapperContext(job);
     //we don't use the HadoopJobExecHooks for local tasks
-    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, null);
+    this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, null);
   }
 
   public static String now() {

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
index 7f87adf..0b494aa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.MapOperator;
@@ -71,9 +72,9 @@ public class SparkTask extends Task<SparkWork> {
   private static final long serialVersionUID = 1L;
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext,
+  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext,
       CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, driverContext, opContext);
+    super.initialize(queryState, queryPlan, driverContext, opContext);
   }
 
   @Override
@@ -137,7 +138,7 @@ public class SparkTask extends Task<SparkWork> {
     console.printInfo("Starting Spark Job = " + jobRef.getJobId());
     if (SessionState.get() != null) {
       SessionState.get().getHiveHistory()
-         .setQueryProperty(SessionState.get().getQueryId(), Keys.SPARK_JOB_ID, jobRef.getJobId());
+         .setQueryProperty(queryState.getQueryId(), Keys.SPARK_JOB_ID, jobRef.getJobId());
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
index a1e35cb..0234fd9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
@@ -266,13 +266,9 @@ public class HiveHistoryImpl implements HiveHistory{
   @Override
   public void startTask(String queryId, Task<? extends Serializable> task,
       String taskName) {
-    SessionState ss = SessionState.get();
-    if (ss == null) {
-      return;
-    }
     TaskInfo ti = new TaskInfo();
 
-    ti.hm.put(Keys.QUERY_ID.name(), ss.getQueryId());
+    ti.hm.put(Keys.QUERY_ID.name(), queryId);
     ti.hm.put(Keys.TASK_ID.name(), task.getId());
     ti.hm.put(Keys.TASK_NAME.name(), taskName);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
index b7e70be..742edc8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
@@ -25,6 +25,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -94,6 +95,7 @@ public class ATSHook implements ExecuteWithHookContext {
   public void run(final HookContext hookContext) throws Exception {
     final long currentTime = System.currentTimeMillis();
     final HiveConf conf = new HiveConf(hookContext.getConf());
+    final QueryState queryState = hookContext.getQueryState();
 
     executor.submit(new Runnable() {
         @Override
@@ -134,7 +136,7 @@ public class ATSHook implements ExecuteWithHookContext {
             );
               @SuppressWarnings("unchecked")
               ExplainTask explain = (ExplainTask) TaskFactory.get(work, conf);
-              explain.initialize(conf, plan, null, null);
+              explain.initialize(queryState, plan, null, null);
               String query = plan.getQueryStr();
               JSONObject explainPlan = explain.getJSONPlan(null, work);
              String logID = conf.getLogIdVar(SessionState.get().getSessionId());

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
index 8e1672f..8db0124 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
@@ -27,6 +27,7 @@ import java.util.Set;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.TaskRunner;
 import org.apache.hadoop.hive.ql.optimizer.lineage.LineageCtx.Index;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -43,6 +44,7 @@ public class HookContext {
   }
 
   private QueryPlan queryPlan;
+  private final QueryState queryState;
   private HiveConf conf;
   private List<TaskRunner> completeTaskList;
   private Set<ReadEntity> inputs;
@@ -60,11 +62,12 @@ public class HookContext {
   // TExecuteStatementResp.TOperationHandle.THandleIdentifier.guid
   private final String operationId;
 
-  public HookContext(QueryPlan queryPlan, HiveConf conf,
+  public HookContext(QueryPlan queryPlan, QueryState queryState,
      Map<String, ContentSummary> inputPathToContentSummary, String userName, String ipAddress,
       String operationId) throws Exception {
     this.queryPlan = queryPlan;
-    this.conf = conf;
+    this.queryState = queryState;
+    this.conf = queryState.getConf();
     this.inputPathToContentSummary = inputPathToContentSummary;
     completeTaskList = new ArrayList<TaskRunner>();
     inputs = queryPlan.getInputs();
@@ -192,4 +195,8 @@ public class HookContext {
   public String getOperationId() {
     return operationId;
   }
+
+  public QueryState getQueryState() {
+    return queryState;
+  }
 }

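With getQueryState() now exposed on HookContext, a hook can read per-query info
without consulting SessionState. A minimal sketch of such a hook (the class name is
hypothetical; ExecuteWithHookContext and HookContext are the existing hook types
from org.apache.hadoop.hive.ql.hooks):

    import org.apache.hadoop.hive.ql.QueryState;
    import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
    import org.apache.hadoop.hive.ql.hooks.HookContext;

    public class QueryIdEchoHook implements ExecuteWithHookContext {
      @Override
      public void run(HookContext hookContext) throws Exception {
        QueryState qs = hookContext.getQueryState();
        // The query id and command type now come from the hook context,
        // not from the session-wide SessionState.
        System.err.println("query " + qs.getQueryId() + " type " + qs.getCommandType());
      }
    }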
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
index 4518315..b4fc125 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
@@ -27,6 +27,7 @@ import java.util.Set;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.hooks.HookContext.HookType;
 import org.apache.hadoop.hive.ql.hooks.LineageInfo.BaseColumnInfo;
 import org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency;
@@ -97,15 +98,14 @@ public class PostExecutePrinter implements ExecuteWithHookContext {
   @Override
   public void run(HookContext hookContext) throws Exception {
     assert(hookContext.getHookType() == HookType.POST_EXEC_HOOK);
-    SessionState ss = SessionState.get();
     Set<ReadEntity> inputs = hookContext.getInputs();
     Set<WriteEntity> outputs = hookContext.getOutputs();
     LineageInfo linfo = hookContext.getLinfo();
     UserGroupInformation ugi = hookContext.getUgi();
-    this.run(ss,inputs,outputs,linfo,ugi);
+    this.run(hookContext.getQueryState(),inputs,outputs,linfo,ugi);
   }
 
-  public void run(SessionState sess, Set<ReadEntity> inputs,
+  public void run(QueryState queryState, Set<ReadEntity> inputs,
       Set<WriteEntity> outputs, LineageInfo linfo,
       UserGroupInformation ugi) throws Exception {
 
@@ -115,9 +115,9 @@ public class PostExecutePrinter implements ExecuteWithHookContext {
       return;
     }
 
-    if (sess != null) {
-      console.printError("POSTHOOK: query: " + sess.getCmd().trim());
-      console.printError("POSTHOOK: type: " + sess.getCommandType());
+    if (queryState != null) {
+      console.printError("POSTHOOK: query: " + 
queryState.getQueryString().trim());
+      console.printError("POSTHOOK: type: " + queryState.getCommandType());
     }
 
     PreExecutePrinter.printEntities(console, inputs, "POSTHOOK: Input: ");

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
index b5a26d8..232c62d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
@@ -24,6 +24,7 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.hadoop.hive.common.io.FetchConverter;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.hooks.HookContext.HookType;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -40,8 +41,9 @@ public class PreExecutePrinter implements ExecuteWithHookContext {
   public void run(HookContext hookContext) throws Exception {
     assert(hookContext.getHookType() == HookType.PRE_EXEC_HOOK);
     SessionState ss = SessionState.get();
+    QueryState queryState = hookContext.getQueryState();
     if (ss != null && ss.out instanceof FetchConverter) {
-      boolean foundQuery = ss.getHiveOperation() == HiveOperation.QUERY &&
+      boolean foundQuery = queryState.getHiveOperation() == HiveOperation.QUERY &&
               !hookContext.getQueryPlan().isForExplain();
       ((FetchConverter)ss.out).foundQuery(foundQuery);
     }
@@ -49,10 +51,10 @@ public class PreExecutePrinter implements ExecuteWithHookContext {
     Set<ReadEntity> inputs = hookContext.getInputs();
     Set<WriteEntity> outputs = hookContext.getOutputs();
     UserGroupInformation ugi = hookContext.getUgi();
-    this.run(ss,inputs,outputs,ugi);
+    this.run(queryState,inputs,outputs,ugi);
   }
 
-  public void run(SessionState sess, Set<ReadEntity> inputs,
+  public void run(QueryState queryState, Set<ReadEntity> inputs,
       Set<WriteEntity> outputs, UserGroupInformation ugi)
     throws Exception {
 
@@ -62,9 +64,9 @@ public class PreExecutePrinter implements ExecuteWithHookContext {
       return;
     }
 
-    if (sess != null) {
-      console.printError("PREHOOK: query: " + sess.getCmd().trim());
-      console.printError("PREHOOK: type: " + sess.getCommandType());
+    if (queryState != null) {
+      console.printError("PREHOOK: query: " + 
queryState.getQueryString().trim());
+      console.printError("PREHOOK: type: " + queryState.getCommandType());
     }
 
     printEntities(console, inputs, "PREHOOK: Input: ");

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java
index 807959e..29886ae 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/TableBasedIndexHandler.java
@@ -26,8 +26,6 @@ import java.util.Set;
 import java.util.Map.Entry;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.StatsSetupConst;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.ql.exec.Task;

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
index 82629c1..6b0343b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -60,11 +61,11 @@ public class MergeFileTask extends Task<MergeFileWork> implements Serializable,
   private boolean success = true;
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan,
+  public void initialize(QueryState queryState, QueryPlan queryPlan,
       DriverContext driverContext, CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, driverContext, opContext);
+    super.initialize(queryState, queryPlan, driverContext, opContext);
     job = new JobConf(conf, MergeFileTask.class);
-    jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this);
+    jobExecHelper = new HadoopJobExecHelper(queryState, job, this.console, this, this);
   }
 
   @Override

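MergeFileTask above, and PartialScanTask and ColumnTruncateTask below, receive the same mechanical change: Task.initialize() now accepts a QueryState, and HadoopJobExecHelper takes a QueryState as its new leading argument. Since the subclasses keep using conf after calling super.initialize(), the base Task evidently still derives its HiveConf from the QueryState it is handed. The resulting override pattern, sketched once with placeholder names (MyFileTask and MyWork are hypothetical):

    // Fragment: generic shape of the new override inside a Task<MyWork> subclass.
    @Override
    public void initialize(QueryState queryState, QueryPlan queryPlan,
        DriverContext driverContext, CompilationOpContext opContext) {
      super.initialize(queryState, queryPlan, driverContext, opContext);
      // 'conf' is still populated by the base class after super.initialize().
      job = new JobConf(conf, MyFileTask.class);
      // QueryState is now the first constructor argument.
      jobExecHelper = new HadoopJobExecHelper(queryState, job, this.console, this, this);
    }
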
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
index 71371a3..d31510d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper;
@@ -84,11 +85,11 @@ public class PartialScanTask extends Task<PartialScanWork> implements
   protected HadoopJobExecHelper jobExecHelper;
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan,
+  public void initialize(QueryState queryState, QueryPlan queryPlan,
       DriverContext driverContext, CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, driverContext, opContext);
+    super.initialize(queryState, queryPlan, driverContext, opContext);
     job = new JobConf(conf, PartialScanTask.class);
-    jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this);
+    jobExecHelper = new HadoopJobExecHelper(queryState, job, this.console, this, this);
   }
 
   @Override
@@ -331,7 +332,6 @@ public class PartialScanTask extends Task<PartialScanWork> implements
     if (jobConfFileName != null) {
       conf.addResource(new Path(jobConfFileName));
     }
-    HiveConf hiveConf = new HiveConf(conf, PartialScanTask.class);
 
     org.slf4j.Logger LOG = LoggerFactory.getLogger(PartialScanTask.class.getName());
     boolean isSilent = HiveConf.getBoolVar(conf,
@@ -348,11 +348,11 @@ public class PartialScanTask extends Task<PartialScanWork> implements
       }
     }
 
-
+    QueryState queryState = new QueryState(new HiveConf(conf, PartialScanTask.class));
     PartialScanWork mergeWork = new PartialScanWork(inputPaths);
     DriverContext driverCxt = new DriverContext();
     PartialScanTask taskExec = new PartialScanTask();
-    taskExec.initialize(hiveConf, null, driverCxt, new CompilationOpContext());
+    taskExec.initialize(queryState, null, driverCxt, new CompilationOpContext());
     taskExec.setWork(mergeWork);
     int ret = taskExec.execute(driverCxt);
 

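PartialScanTask's standalone main() above also shows the migration path for code that starts from a bare Configuration: wrap a HiveConf in a QueryState and hand that to initialize(). Condensed into one sketch, using only the constructor and calls visible in the hunk (inputPaths is in scope in the original method):

    // Fragment condensed from the main() hunk above.
    QueryState queryState = new QueryState(new HiveConf(conf, PartialScanTask.class));
    PartialScanTask taskExec = new PartialScanTask();
    DriverContext driverCxt = new DriverContext();
    taskExec.initialize(queryState, null, driverCxt, new CompilationOpContext());
    taskExec.setWork(new PartialScanWork(inputPaths));
    int ret = taskExec.execute(driverCxt);
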
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
index bc21da0..8acd6e0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper;
@@ -59,11 +60,11 @@ public class ColumnTruncateTask extends Task<ColumnTruncateWork> implements Seri
   protected HadoopJobExecHelper jobExecHelper;
 
   @Override
-  public void initialize(HiveConf conf, QueryPlan queryPlan,
+  public void initialize(QueryState queryState, QueryPlan queryPlan,
       DriverContext driverContext, CompilationOpContext opContext) {
-    super.initialize(conf, queryPlan, driverContext, opContext);
+    super.initialize(queryState, queryPlan, driverContext, opContext);
     job = new JobConf(conf, ColumnTruncateTask.class);
-    jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this);
+    jobExecHelper = new HadoopJobExecHelper(queryState, job, this.console, this, this);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
index 9c979be..669d56f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
@@ -204,7 +204,7 @@ public class GenMRTableScan1 implements NodeProcessor {
     // partial scan task
     DriverContext driverCxt = new DriverContext();
     Task<PartialScanWork> psTask = TaskFactory.get(scanWork, parseCtx.getConf());
-    psTask.initialize(parseCtx.getConf(), null, driverCxt, op.getCompilationOpContext());
+    psTask.initialize(parseCtx.getQueryState(), null, driverCxt, op.getCompilationOpContext());
     psTask.setWork(scanWork);
 
     // task dependency

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java
index 64f9734..340d29a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java
@@ -24,6 +24,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
@@ -56,16 +57,16 @@ public final class RewriteParseContextGenerator {
    * @param command
    * @throws SemanticException
    */
-  public static Operator<? extends OperatorDesc> generateOperatorTree(HiveConf conf,
+  public static Operator<? extends OperatorDesc> generateOperatorTree(QueryState queryState,
       String command) throws SemanticException {
     Operator<? extends OperatorDesc> operatorTree;
     try {
-      Context ctx = new Context(conf);
+      Context ctx = new Context(queryState.getConf());
       ParseDriver pd = new ParseDriver();
       ASTNode tree = pd.parse(command, ctx);
       tree = ParseUtils.findRootNonNullToken(tree);
 
-      BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
+      BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree);
       assert(sem instanceof SemanticAnalyzer);
       operatorTree = doSemanticAnalysis((SemanticAnalyzer) sem, tree, ctx);
       LOG.info("Sub-query Semantic Analysis Completed");

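RewriteParseContextGenerator above shows the compile-side counterpart: SemanticAnalyzerFactory.get(queryState, tree) replaces the HiveConf overload, and queryState.getConf() recovers a plain HiveConf where one is still required (here, for Context). A sketch of the new entry points, assuming a QueryState queryState and a String command in scope, with error handling elided:

    // Fragment: parse and analyze a command through the QueryState-based factory.
    Context ctx = new Context(queryState.getConf());  // Context still takes a HiveConf
    ParseDriver pd = new ParseDriver();
    ASTNode tree = ParseUtils.findRootNonNullToken(pd.parse(command, ctx));
    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree);
    sem.analyze(tree, ctx);
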
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java
index 74bedcb..3d11907 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java
@@ -268,7 +268,7 @@ public final class RewriteQueryUsingAggregateIndexCtx implements NodeProcessorC
           + rewriteQueryCtx.getIndexKey() + " ";
       // retrieve the operator tree for the query, and the required GroupByOperator from it
       Operator<?> newOperatorTree = RewriteParseContextGenerator.generateOperatorTree(
-              rewriteQueryCtx.getParseContext().getConf(),
+              rewriteQueryCtx.getParseContext().getQueryState(),
               selReplacementCommand);
 
       // we get our new GroupByOperator here

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 19342a8..cbb1b0a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryProperties;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -87,6 +88,7 @@ public abstract class BaseSemanticAnalyzer {
   // Assumes one instance of this + single-threaded compilation for each query.
   protected final Hive db;
   protected final HiveConf conf;
+  protected final QueryState queryState;
   protected List<Task<? extends Serializable>> rootTasks;
   protected FetchTask fetchTask;
   protected final Logger LOG;
@@ -199,13 +201,14 @@ public abstract class BaseSemanticAnalyzer {
     }
   }
 
-  public BaseSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    this(conf, createHiveDB(conf));
+  public BaseSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    this(queryState, createHiveDB(queryState.getConf()));
   }
 
-  public BaseSemanticAnalyzer(HiveConf conf, Hive db) throws SemanticException {
+  public BaseSemanticAnalyzer(QueryState queryState, Hive db) throws SemanticException {
     try {
-      this.conf = conf;
+      this.queryState = queryState;
+      this.conf = queryState.getConf();
       this.db = db;
       rootTasks = new ArrayList<Task<? extends Serializable>>();
       LOG = LoggerFactory.getLogger(this.getClass().getName());
@@ -1502,4 +1505,7 @@ public abstract class BaseSemanticAnalyzer {
   public HashSet<WriteEntity> getAllOutputs() {
     return outputs;
   }
+  public QueryState getQueryState() {
+    return queryState;
+  }
 }

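BaseSemanticAnalyzer is the hub of the change: it stores the QueryState, keeps conf as an alias for queryState.getConf(), and exposes getQueryState() to downstream code. Every analyzer constructor in the hunks that follow reduces to the same one-line delegation; sketched here with a hypothetical subclass, not part of this patch:

    import org.apache.hadoop.hive.ql.QueryState;
    import org.apache.hadoop.hive.ql.parse.ASTNode;
    import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
    import org.apache.hadoop.hive.ql.parse.SemanticException;

    // Hypothetical analyzer; mirrors the constructor pattern repeated below.
    public class MySemanticAnalyzer extends BaseSemanticAnalyzer {
      public MySemanticAnalyzer(QueryState queryState) throws SemanticException {
        super(queryState);  // also populates the inherited 'conf' field
      }

      @Override
      public void analyzeInternal(ASTNode ast) throws SemanticException {
        // Read per-query data from queryState, configuration from conf.
      }
    }
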
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index c069dc4..8e00e0b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -104,6 +104,7 @@ import org.apache.hadoop.hive.conf.HiveConf.StrictChecks;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryProperties;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -217,8 +218,8 @@ public class CalcitePlanner extends SemanticAnalyzer {
   private boolean disableSemJoinReordering = true;
   private EnumSet<ExtendedCBOProfile> profilesCBO;
 
-  public CalcitePlanner(HiveConf conf) throws SemanticException {
-    super(conf);
+  public CalcitePlanner(QueryState queryState) throws SemanticException {
+    super(queryState);
     if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED)) {
       runCBO = false;
       disableSemJoinReordering = false;
@@ -514,18 +515,18 @@ public class CalcitePlanner extends SemanticAnalyzer {
     createTable.addChild(temporary);
     createTable.addChild(cte.cteNode);
 
-    CalcitePlanner analyzer = new CalcitePlanner(conf);
+    CalcitePlanner analyzer = new CalcitePlanner(queryState);
     analyzer.initCtx(ctx);
     analyzer.init(false);
 
     // should share cte contexts
     analyzer.aliasToCTEs.putAll(aliasToCTEs);
 
-    HiveOperation operation = SessionState.get().getHiveOperation();
+    HiveOperation operation = queryState.getHiveOperation();
     try {
       analyzer.analyzeInternal(createTable);
     } finally {
-      SessionState.get().setCommandType(operation);
+      queryState.setCommandType(operation);
     }
 
     Table table = analyzer.tableDesc.toTable(conf);

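The CalcitePlanner hunk above is a good example of state that was previously smuggled through SessionState: the current HiveOperation is now read from, and restored on, the QueryState itself. Note the asymmetric accessor pair, getHiveOperation() to read and setCommandType(HiveOperation) to write. The save/restore idiom in isolation:

    // Fragment: preserve the operation type across a nested analysis,
    // as the CTE-materialization path above does.
    HiveOperation operation = queryState.getHiveOperation();
    try {
      analyzer.analyzeInternal(createTable);
    } finally {
      queryState.setCommandType(operation);
    }
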
http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
index bb1bbad..3b6cbce 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
@@ -25,15 +25,14 @@ import java.util.Map;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.HiveStatsUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.conf.HiveVariableSource;
 import org.apache.hadoop.hive.conf.VariableSubstitution;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -62,8 +61,8 @@ public class ColumnStatsSemanticAnalyzer extends SemanticAnalyzer {
   private List<String> colType;
   private Table tbl;
 
-  public ColumnStatsSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public ColumnStatsSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   private boolean shouldRewrite(ASTNode tree) {

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 46d2342..04e2a41 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
 import org.apache.hadoop.hive.ql.exec.ColumnStatsUpdateTask;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
@@ -229,12 +230,12 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     return typeName;
   }
 
-  public DDLSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    this(conf, createHiveDB(conf));
+  public DDLSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    this(queryState, createHiveDB(queryState.getConf()));
   }
 
-  public DDLSemanticAnalyzer(HiveConf conf, Hive db) throws SemanticException {
-    super(conf, db);
+  public DDLSemanticAnalyzer(QueryState queryState, Hive db) throws SemanticException {
+    super(queryState, db);
     reservedPartitionValues = new HashSet<String>();
     // Partition can't have this name
     reservedPartitionValues.add(HiveConf.getVar(conf, ConfVars.DEFAULTPARTITIONNAME));
@@ -1350,9 +1351,9 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(0))
         .getChild(0));
     EnvironmentContext environmentContext = null;
-    if (SessionState.get().getCommandType()
+    if (queryState.getCommandType()
         .equals(HiveOperation.ALTERTABLE_UPDATETABLESTATS.getOperationName())
-        || SessionState.get().getCommandType()
+        || queryState.getCommandType()
             .equals(HiveOperation.ALTERTABLE_UPDATEPARTSTATS.getOperationName())) {
       // we need to check if the properties are valid, especially for stats.
       boolean changeStatsSucceeded = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java
index 6f0f3a6..8d7fd92 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java
@@ -19,8 +19,8 @@
 
 import java.util.List;
 
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ExplainSQRewriteTask;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.ExplainSQRewriteWork;
@@ -28,8 +28,8 @@ import org.apache.hadoop.hive.ql.plan.ExplainSQRewriteWork;
 public class ExplainSQRewriteSemanticAnalyzer extends BaseSemanticAnalyzer {
   List<FieldSchema> fieldList;
 
-  public ExplainSQRewriteSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public ExplainSQRewriteSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   @SuppressWarnings("unchecked")
@@ -42,7 +42,7 @@ public class ExplainSQRewriteSemanticAnalyzer extends BaseSemanticAnalyzer {
     // Create a semantic analyzer for the query
     ASTNode input = (ASTNode) ast.getChild(0);
     SemanticAnalyzer sem = (SemanticAnalyzer)
-        SemanticAnalyzerFactory.get(conf, input);
+        SemanticAnalyzerFactory.get(queryState, input);
     sem.analyze(input, ctx);
     sem.validate();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
index eefc145..75753b0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
@@ -24,6 +24,7 @@ import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -37,8 +38,8 @@ import org.apache.hadoop.hive.ql.plan.ExplainWork;
 public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
   List<FieldSchema> fieldList;
 
-  public ExplainSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public ExplainSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   @SuppressWarnings("unchecked")
@@ -70,7 +71,7 @@ public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
 
     // Create a semantic analyzer for the query
     ASTNode input = (ASTNode) ast.getChild(0);
-    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, input);
+    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, input);
     sem.analyze(input, ctx);
     sem.validate();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
index fe8147a..475f2c9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -47,8 +48,8 @@ public class ExportSemanticAnalyzer extends BaseSemanticAnalyzer {
 
   private ReplicationSpec replicationSpec;
 
-  public ExportSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public ExportSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
index be908d3..1ec45ee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.ResourceType;
 import org.apache.hadoop.hive.metastore.api.ResourceUri;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.FunctionUtils;
@@ -49,8 +50,8 @@ public class FunctionSemanticAnalyzer extends BaseSemanticAnalyzer {
   private static final Logger LOG = LoggerFactory
       .getLogger(FunctionSemanticAnalyzer.class);
 
-  public FunctionSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public FunctionSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index 549d24f..a9bc271 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -47,6 +47,7 @@ import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -74,8 +75,8 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
 
   public static final String METADATA_NAME="_metadata";
 
-  public ImportSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public ImportSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   private boolean tableExists = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
index b90616f..a49b813 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -58,8 +59,8 @@ import com.google.common.collect.Lists;
  */
 public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
 
-  public LoadSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public LoadSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   public static FileStatus[] matchFilesOrDir(FileSystem fs, Path path)

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
index e394914..fe065f8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.FunctionUtils;
@@ -44,13 +45,10 @@ import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.CreateMacroDesc;
 import org.apache.hadoop.hive.ql.plan.DropMacroDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.FunctionWork;
-import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
@@ -62,8 +60,8 @@ public class MacroSemanticAnalyzer extends BaseSemanticAnalyzer {
   private static final Logger LOG = LoggerFactory
       .getLogger(MacroSemanticAnalyzer.class);
 
-  public MacroSemanticAnalyzer(HiveConf conf) throws SemanticException {
-    super(conf);
+  public MacroSemanticAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
   }
 
   @Override
@@ -132,8 +130,8 @@ public class MacroSemanticAnalyzer extends BaseSemanticAnalyzer {
       throw new SemanticException("At least one parameter name was used more 
than once "
           + macroColNames);
     }
-    SemanticAnalyzer sa = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED) ? new CalcitePlanner(
-        conf) : new SemanticAnalyzer(conf);
+    SemanticAnalyzer sa = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED) ?
+        new CalcitePlanner(queryState) : new SemanticAnalyzer(queryState);
     ;
     ExprNodeDesc body;
     if(isNoArgumentMacro) {

http://git-wip-us.apache.org/repos/asf/hive/blob/caa3ec76/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
index 1bccf20..5d0f905 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
@@ -28,6 +28,7 @@ import java.util.Set;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.QueryProperties;
+import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
@@ -78,6 +79,7 @@ public class ParseContext {
   private List<LoadTableDesc> loadTableWork;
   private List<LoadFileDesc> loadFileWork;
   private Context ctx;
+  private QueryState queryState;
   private HiveConf conf;
   private HashMap<String, String> idToTableNameMap;
   private int destTableId;
@@ -153,7 +155,7 @@ public class ParseContext {
    * @param rootTasks
    */
   public ParseContext(
-      HiveConf conf,
+      QueryState queryState,
       HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner,
       HashMap<TableScanOperator, PrunedPartitionList> opToPartList,
       HashMap<String, TableScanOperator> topOps,
@@ -173,7 +175,8 @@ public class ParseContext {
       List<ReduceSinkOperator> reduceSinkOperatorsAddedByEnforceBucketingSorting,
       AnalyzeRewriteContext analyzeRewrite, CreateTableDesc createTableDesc,
       QueryProperties queryProperties, Map<SelectOperator, Table> viewProjectToTableSchema) {
-    this.conf = conf;
+    this.queryState = queryState;
+    this.conf = queryState.getConf();
     this.opToPartPruner = opToPartPruner;
     this.opToPartList = opToPartList;
     this.joinOps = joinOps;
@@ -241,6 +244,13 @@ public class ParseContext {
   }
 
   /**
+   * @return the query state
+   */
+  public QueryState getQueryState() {
+    return queryState;
+  }
+
+  /**
    * @return the opToPartPruner
    */
   public HashMap<TableScanOperator, ExprNodeDesc> getOpToPartPruner() {

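ParseContext follows the same template as BaseSemanticAnalyzer: it takes the QueryState in its constructor, derives conf from it, and adds a getQueryState() accessor so optimizer rules can forward the full per-query state rather than just the configuration, as the GenMRTableScan1 hunk earlier in this diff already does:

    // Fragment from an optimizer NodeProcessor: forward the whole QueryState.
    Task<PartialScanWork> psTask = TaskFactory.get(scanWork, parseCtx.getConf());
    psTask.initialize(parseCtx.getQueryState(), null, driverCxt, op.getCompilationOpContext());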