HIVE-17835: HS2 Logs print unnecessary stack trace when HoS query is cancelled (Sahil Takiar, reviewed by Chao Sun)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e33edd96
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e33edd96
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e33edd96

Branch: refs/heads/standalone-metastore
Commit: e33edd9649ce05495396a2183b1be3d1a79fd0d3
Parents: 717ef18
Author: Sahil Takiar <takiar.sa...@gmail.com>
Authored: Fri Feb 9 14:49:38 2018 -0800
Committer: Sahil Takiar <stak...@cloudera.com>
Committed: Fri Feb 9 14:49:38 2018 -0800

----------------------------------------------------------------------
 .../hadoop/hive/ql/exec/spark/SparkTask.java     |  4 +++-
 .../exec/spark/status/RemoteSparkJobMonitor.java | 19 +++++++++++--------
 2 files changed, 14 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/e33edd96/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
index c6e17b5..62daaaa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
@@ -117,6 +117,7 @@ public class SparkTask extends Task<SparkWork> {
       perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.SPARK_SUBMIT_JOB);
 
       if (driverContext.isShutdown()) {
+        LOG.warn("Killing Spark job");
         killJob();
         throw new HiveException("Operation is cancelled.");
       }
@@ -337,7 +338,7 @@ public class SparkTask extends Task<SparkWork> {
       try {
         jobRef.cancelJob();
       } catch (Exception e) {
-        LOG.warn("failed to kill job", e);
+        LOG.warn("Failed to kill Spark job", e);
       }
     }
   }
@@ -424,6 +425,7 @@ public class SparkTask extends Task<SparkWork> {
           if ((error instanceof InterruptedException) ||
               (error instanceof HiveException &&
               error.getCause() instanceof InterruptedException)) {
+            LOG.info("Killing Spark job since query was interrupted");
             killJob();
           }
           HiveException he;

http://git-wip-us.apache.org/repos/asf/hive/blob/e33edd96/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
index 6c7aca7..4c4ce55 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
@@ -184,16 +184,19 @@ public class RemoteSparkJobMonitor extends SparkJobMonitor {
         }
       } catch (Exception e) {
         Exception finalException = e;
-        if (e instanceof InterruptedException) {
+        if (e instanceof InterruptedException ||
+                (e instanceof HiveException && e.getCause() instanceof InterruptedException)) {
           finalException = new HiveException(e, ErrorMsg.SPARK_JOB_INTERRUPTED);
+          LOG.warn("Interrupted while monitoring the Hive on Spark application, exiting");
+        } else {
+          String msg = " with exception '" + Utilities.getNameMessage(e) + "'";
+          msg = "Failed to monitor Job[" + sparkJobStatus.getJobId() + "]" + msg;
+
+          // Has to use full name to make sure it does not conflict with
+          // org.apache.commons.lang.StringUtils
+          LOG.error(msg, e);
+          console.printError(msg, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
         }
-        String msg = " with exception '" + Utilities.getNameMessage(e) + "'";
-        msg = "Failed to monitor Job[" + sparkJobStatus.getJobId() + "]" + msg;
-
-        // Has to use full name to make sure it does not conflict with
-        // org.apache.commons.lang.StringUtils
-        LOG.error(msg, e);
-        console.printError(msg, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
         rc = 1;
         done = true;
         sparkJobStatus.setError(finalException);
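
----------------------------------------------------------------------

The behavioral change in the hunk above is the widened cancellation check: an InterruptedException wrapped in a HiveException is now also treated as a user cancellation, so it gets a short warning instead of the full stack trace. A minimal standalone sketch of that condition, assuming hive-exec is on the classpath (the helper name isInterruption is hypothetical; the commit inlines the check):

  import org.apache.hadoop.hive.ql.metadata.HiveException;

  public class InterruptionCheck {
    // True when the monitor should treat the failure as a cancellation
    // (short warning) rather than a monitoring error (full stack trace).
    static boolean isInterruption(Exception e) {
      return e instanceof InterruptedException
          || (e instanceof HiveException
              && e.getCause() instanceof InterruptedException);
    }

    public static void main(String[] args) {
      // A HiveException wrapping an InterruptedException now also counts
      // as a cancellation; that wrapped case is what this commit adds.
      System.out.println(isInterruption(
          new HiveException(new InterruptedException("query cancelled")))); // true
      System.out.println(isInterruption(new RuntimeException("boom")));     // false
    }
  }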
