Author: jvs
Date: Fri Nov 18 20:29:28 2011
New Revision: 1203819

URL: http://svn.apache.org/viewvc?rev=1203819&view=rev
Log:
HIVE-2569 [jira] Too much debugging info on console if a job failed
(Ning Zhang via John Sichi)

Summary:
Too much debugging info on console if a job failed

When a job failed and Hive client tries to get the error message from failed
task, it printed the following info on console for each task:   

Examining task ID: task_201110112120_773499_m_000037 from job
job_201110112120_773499   

This should be shortened significantly.

Test Plan: EMPTY

Reviewers: JIRA, njain, jsichi

Reviewed By: jsichi

CC: jsichi

Differential Revision: 375

Modified:
    
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java?rev=1203819&r1=1203818&r2=1203819&view=diff
==============================================================================
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java 
(original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java 
Fri Nov 18 20:29:28 2011
@@ -24,12 +24,12 @@ import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.Collections;
+import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.Enumeration;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -45,12 +45,12 @@ import org.apache.hadoop.hive.ql.session
 import org.apache.hadoop.hive.ql.stats.ClientStatsPublisher;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.Counters;
+import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
 import org.apache.hadoop.mapred.TaskReport;
-import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.log4j.Appender;
 import org.apache.log4j.FileAppender;
 import org.apache.log4j.LogManager;
@@ -310,7 +310,7 @@ public class HadoopJobExecHelper {
       errMsg.setLength(0);
 
       updateCounters(ctrs, rj);
-      
+
       // Prepare data for Client Stat Publishers (if any present) and execute 
them
       if (clientStatPublishers.size() > 0 && ctrs != null) {
         Map<String, Double> exctractedCounters = extractAllCounterValues(ctrs);
@@ -510,6 +510,7 @@ public class HadoopJobExecHelper {
       }
 
       boolean more = true;
+      boolean firstError = true;
       for (TaskCompletionEvent t : taskCompletions) {
         // getTaskJobIDs returns Strings for compatibility with Hadoop versions
         // without TaskID or TaskAttemptID
@@ -525,7 +526,10 @@ public class HadoopJobExecHelper {
         // and the logs
         String taskId = taskJobIds[0];
         String jobId = taskJobIds[1];
-        console.printError("Examining task ID: " + taskId + " from job " + 
jobId);
+        if (firstError) {
+          console.printError("Examining task ID: " + taskId + " (and more) 
from job " + jobId);
+          firstError = false;
+        }
 
         TaskInfo ti = taskIdToInfo.get(taskId);
         if (ti == null) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java?rev=1203819&r1=1203818&r2=1203819&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java Fri 
Nov 18 20:29:28 2011
@@ -19,10 +19,6 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import java.io.IOException;
-import java.io.Serializable;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -30,31 +26,26 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.exec.Operator.ProgressCounter;
 import org.apache.hadoop.hive.ql.exec.errors.ErrorAndSolution;
 import org.apache.hadoop.hive.ql.exec.errors.TaskLogProcessor;
-import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
-import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.mapred.Counters;
-import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
-import org.apache.hadoop.mapred.TaskReport;
 
 /**
  * JobDebugger takes a RunningJob that has failed and grabs the top 4 failing
  * tasks and outputs this information to the Hive CLI.
  */
 public class JobDebugger implements Runnable {
-  private JobConf conf;
-  private RunningJob rj;
-  private LogHelper console;
-  private Map<String, Integer> failures = new HashMap<String, Integer>(); // 
Mapping from task ID to the number of failures
-  private Set<String> successes = new HashSet<String>(); // Successful task 
ID's
-  private Map<String, TaskInfo> taskIdToInfo = new HashMap<String, TaskInfo>();
+  private final JobConf conf;
+  private final RunningJob rj;
+  private final LogHelper console;
+  // Mapping from task ID to the number of failures
+  private final Map<String, Integer> failures = new HashMap<String, Integer>();
+  private final Set<String> successes = new HashSet<String>(); // Successful 
task ID's
+  private final Map<String, TaskInfo> taskIdToInfo = new HashMap<String, 
TaskInfo>();
 
   // Used for showJobFailDebugInfo
   private static class TaskInfo {
@@ -116,6 +107,7 @@ public class JobDebugger implements Runn
         }
 
         boolean more = true;
+        boolean firstError = true;
         for (TaskCompletionEvent t : taskCompletions) {
           // getTaskJobIDs returns Strings for compatibility with Hadoop 
versions
           // without TaskID or TaskAttemptID
@@ -131,7 +123,10 @@ public class JobDebugger implements Runn
           // and the logs
           String taskId = taskJobIds[0];
           String jobId = taskJobIds[1];
-          console.printError("Examining task ID: " + taskId + " from job " + 
jobId);
+          if (firstError) {
+            console.printError("Examining task ID: " + taskId + " (and more) 
from job " + jobId);
+            firstError = false;
+          }
 
           TaskInfo ti = taskIdToInfo.get(taskId);
           if (ti == null) {
@@ -181,7 +176,7 @@ public class JobDebugger implements Runn
       console.printError("Timed out trying to finish grabbing task log URLs, "
           + "some task info may be missing");
     }
-    
+
     // Remove failures for tasks that succeeded
     for (String task : successes) {
       failures.remove(task);


Reply via email to