Repository: hive Updated Branches: refs/heads/master 72cea13e4 -> fb35bae5a
HIVE-14109: query execution throws NPE when hive.exec.submitviachild is set to true (Aihua Xu, reviewed by Sergio Peña) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fb35bae5 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fb35bae5 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fb35bae5 Branch: refs/heads/master Commit: fb35bae5ae2fad93de3deef9023f52cba8e4783b Parents: 72cea13 Author: Aihua Xu <aihu...@apache.org> Authored: Sat Jun 25 21:24:37 2016 -0400 Committer: Aihua Xu <aihu...@apache.org> Committed: Mon Jul 4 11:24:39 2016 -0400 ---------------------------------------------------------------------- .../apache/hadoop/hive/ql/exec/mr/ExecDriver.java | 8 +++++--- .../hive/ql/exec/mr/HadoopJobExecHelper.java | 18 ++++++++---------- .../hadoop/hive/ql/exec/mr/MapredLocalTask.java | 2 +- .../hadoop/hive/ql/io/merge/MergeFileTask.java | 2 +- .../hive/ql/io/rcfile/stats/PartialScanTask.java | 2 +- .../ql/io/rcfile/truncate/ColumnTruncateTask.java | 2 +- 6 files changed, 17 insertions(+), 17 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hive/blob/fb35bae5/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java index 4a642db..285f9ad 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java @@ -126,7 +126,8 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop public ExecDriver() { super(); console = new LogHelper(LOG); - this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, this); + job = new JobConf(ExecDriver.class); + this.jobExecHelper = new 
HadoopJobExecHelper(job, console, this, this); } @Override @@ -175,7 +176,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop initializeFiles("tmparchives", getResource(conf, SessionState.ResourceType.ARCHIVE)); conf.stripHiddenConfigurations(job); - this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, this); + this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this); } /** @@ -185,7 +186,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop setWork(plan); this.job = job; console = new LogHelper(LOG, isSilent); - this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, this); + this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this); } /** @@ -671,6 +672,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID, "").trim(); if(queryId.isEmpty()) { queryId = "unknown-" + System.currentTimeMillis(); + HiveConf.setVar(conf, HiveConf.ConfVars.HIVEQUERYID, queryId); } System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId); http://git-wip-us.apache.org/repos/asf/hive/blob/fb35bae5/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java index 5656f9a..cfb4a28 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java @@ -34,7 +34,6 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.MapRedStats; -import org.apache.hadoop.hive.ql.QueryState; import 
org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskHandle; @@ -78,7 +77,7 @@ public class HadoopJobExecHelper { public transient JobID jobId; private final LogHelper console; private final HadoopJobExecHook callBackObj; - private final QueryState queryState; + private final String queryId; /** * Update counters relevant to this task. @@ -139,9 +138,9 @@ public class HadoopJobExecHelper { this.jobId = jobId; } - public HadoopJobExecHelper(QueryState queryState, JobConf job, LogHelper console, + public HadoopJobExecHelper(JobConf job, LogHelper console, Task<? extends Serializable> task, HadoopJobExecHook hookCallBack) { - this.queryState = queryState; + this.queryId = HiveConf.getVar(job, HiveConf.ConfVars.HIVEQUERYID, "unknown-" + System.currentTimeMillis()); this.job = job; this.console = console; this.task = task; @@ -259,7 +258,6 @@ public class HadoopJobExecHelper { String logMapper; String logReducer; - String queryId = queryState.getQueryId(); TaskReport[] mappers = jc.getMapTaskReports(rj.getID()); if (mappers == null) { logMapper = "no information for number of mappers; "; @@ -364,11 +362,11 @@ public class HadoopJobExecHelper { String output = report.toString(); SessionState ss = SessionState.get(); if (ss != null) { - ss.getHiveHistory().setTaskCounters(queryState.getQueryId(), getId(), ctrs); - ss.getHiveHistory().setTaskProperty(queryState.getQueryId(), getId(), + ss.getHiveHistory().setTaskCounters(queryId, getId(), ctrs); + ss.getHiveHistory().setTaskProperty(queryId, getId(), Keys.TASK_HADOOP_PROGRESS, output); if (ss.getConf().getBoolVar(HiveConf.ConfVars.HIVE_LOG_INCREMENTAL_PLAN_PROGRESS)) { - ss.getHiveHistory().progressTask(queryState.getQueryId(), this.task); + ss.getHiveHistory().progressTask(queryId, this.task); this.callBackObj.logPlanProgress(ss); } } @@ -397,7 +395,7 @@ public class HadoopJobExecHelper { } else { SessionState ss = SessionState.get(); if (ss != 
null) { - ss.getHiveHistory().setTaskCounters(queryState.getQueryId(), getId(), ctrs); + ss.getHiveHistory().setTaskCounters(queryId, getId(), ctrs); } success = rj.isSuccessful(); } @@ -441,7 +439,7 @@ public class HadoopJobExecHelper { console.printInfo("Job running in-process (local Hadoop)"); } else { if (SessionState.get() != null) { - SessionState.get().getHiveHistory().setTaskProperty(queryState.getQueryId(), + SessionState.get().getHiveHistory().setTaskProperty(queryId, getId(), Keys.TASK_HADOOP_ID, rj.getID().toString()); } console.printInfo(getJobStartMsg(rj.getID()) + ", Tracking URL = " http://git-wip-us.apache.org/repos/asf/hive/blob/fb35bae5/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java index a26d2b8..23a13d6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java @@ -128,7 +128,7 @@ public class MapredLocalTask extends Task<MapredLocalWork> implements Serializab job = new JobConf(conf, ExecDriver.class); execContext = new ExecMapperContext(job); //we don't use the HadoopJobExecHooks for local tasks - this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, null); + this.jobExecHelper = new HadoopJobExecHelper(job, console, this, null); } public static String now() { http://git-wip-us.apache.org/repos/asf/hive/blob/fb35bae5/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java index 0fedd48..376bab2 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java @@ -65,7 +65,7 @@ public class MergeFileTask extends Task<MergeFileWork> implements Serializable, DriverContext driverContext, CompilationOpContext opContext) { super.initialize(queryState, queryPlan, driverContext, opContext); job = new JobConf(conf, MergeFileTask.class); - jobExecHelper = new HadoopJobExecHelper(queryState, job, this.console, this, this); + jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this); } @Override http://git-wip-us.apache.org/repos/asf/hive/blob/fb35bae5/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java index 6771b3e..6131581 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java @@ -89,7 +89,7 @@ public class PartialScanTask extends Task<PartialScanWork> implements DriverContext driverContext, CompilationOpContext opContext) { super.initialize(queryState, queryPlan, driverContext, opContext); job = new JobConf(conf, PartialScanTask.class); - jobExecHelper = new HadoopJobExecHelper(queryState, job, this.console, this, this); + jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this); } @Override http://git-wip-us.apache.org/repos/asf/hive/blob/fb35bae5/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java index ffc6311..2d29afc 
100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java @@ -64,7 +64,7 @@ public class ColumnTruncateTask extends Task<ColumnTruncateWork> implements Seri DriverContext driverContext, CompilationOpContext opContext) { super.initialize(queryState, queryPlan, driverContext, opContext); job = new JobConf(conf, ColumnTruncateTask.class); - jobExecHelper = new HadoopJobExecHelper(queryState, job, this.console, this, this); + jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this); } @Override