Author: jlowe
Date: Fri May 17 20:27:47 2013
New Revision: 1483978

URL: http://svn.apache.org/r1483978
Log:
svn merge -c 1483974 FIXES: MAPREDUCE-4927. Historyserver 500 error due to
NPE when accessing specific counters page for failed job. Contributed by
Ashwin Shankar
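For context: a failed task may have no counters at all, so the task's Counters
object can be null, and the old code NPE'd when it chained straight into
getGroup(). Below is a minimal, self-contained sketch of the null-guard pattern
the fix applies; TaskStub, CounterGroupStub and counterValue are hypothetical
stand-ins for illustration only, not the actual Hadoop MapReduce classes touched
by this patch.

import java.util.HashMap;
import java.util.Map;

public class NullGuardSketch {

  // Hypothetical stand-in for a counter group: counter name -> value.
  static class CounterGroupStub {
    private final Map<String, Long> counters = new HashMap<String, Long>();
    Long findCounter(String name) { return counters.get(name); }
    void set(String name, long value) { counters.put(name, value); }
  }

  // Hypothetical stand-in for a task; a task that failed before running
  // may have no counters at all, so getCounters() can return null.
  static class TaskStub {
    private final Map<String, CounterGroupStub> counters;
    TaskStub(Map<String, CounterGroupStub> counters) { this.counters = counters; }
    Map<String, CounterGroupStub> getCounters() { return counters; }
  }

  // Mirrors the shape of the fixed loop: check the counters object for null
  // before asking it for a group, so a counter-less failed task simply
  // contributes 0 instead of triggering an NPE.
  static long counterValue(TaskStub task, String groupName, String counterName) {
    Map<String, CounterGroupStub> counters = task.getCounters();
    CounterGroupStub group = (counters != null) ? counters.get(groupName) : null;
    if (group != null) {
      Long c = group.findCounter(counterName);
      if (c != null) {
        return c.longValue();
      }
    }
    return 0;
  }

  public static void main(String[] args) {
    CounterGroupStub fs = new CounterGroupStub();
    fs.set("HDFS_BYTES_READ", 1024L);

    Map<String, CounterGroupStub> groups = new HashMap<String, CounterGroupStub>();
    groups.put("org.apache.hadoop.mapreduce.FileSystemCounter", fs);

    TaskStub healthyTask = new TaskStub(groups);
    TaskStub failedTask = new TaskStub(null); // failed task with no counters

    // prints 1024
    System.out.println(counterValue(healthyTask,
        "org.apache.hadoop.mapreduce.FileSystemCounter", "HDFS_BYTES_READ"));
    // prints 0 instead of throwing the NPE that caused the 500 response
    System.out.println(counterValue(failedTask,
        "org.apache.hadoop.mapreduce.FileSystemCounter", "HDFS_BYTES_READ"));
  }
}

The real change in SingleCounterBlock.java (diff below) applies the same guard
inside the per-task loop.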
Modified:
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/SingleCounterBlock.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt?rev=1483978&r1=1483977&r2=1483978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt Fri May 17 20:27:47 2013
@@ -34,6 +34,9 @@ Release 0.23.8 - UNRELEASED
     MAPREDUCE-5168. Reducer can OOM during shuffle because on-disk output
     stream not released (jlowe)
 
+    MAPREDUCE-4927. Historyserver 500 error due to NPE when accessing specific
+    counters page for failed job. (Ashwin Shankar via jlowe)
+
 Release 0.23.7 - 2013-04-18
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/SingleCounterBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/SingleCounterBlock.java?rev=1483978&r1=1483977&r2=1483978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/SingleCounterBlock.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/SingleCounterBlock.java Fri May 17 20:27:47 2013
@@ -143,8 +143,9 @@ public class SingleCounterBlock extends 
     Map<TaskId, Task> tasks = job.getTasks();
     for(Map.Entry<TaskId, Task> entry : tasks.entrySet()) {
       long value = 0;
-      CounterGroup group = entry.getValue().getCounters()
-        .getGroup($(COUNTER_GROUP));
+      Counters counters = entry.getValue().getCounters();
+      CounterGroup group = (counters != null) ? counters
+        .getGroup($(COUNTER_GROUP)) : null;
       if(group != null) {
         Counter c = group.findCounter($(COUNTER_NAME));
         if(c != null) {

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java?rev=1483978&r1=1483977&r2=1483978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java Fri May 17 20:27:47 2013
@@ -182,6 +182,11 @@ public class TestAMWebApp {
   @Test public void testSingleCounterView() {
     AppContext appContext = new TestAppContext();
+    Job job = appContext.getAllJobs().values().iterator().next();
+    // add a failed task to the job without any counters
+    Task failedTask = MockJobs.newTask(job.getID(), 2, 1, true);
+    Map<TaskId,Task> tasks = job.getTasks();
+    tasks.put(failedTask.getID(), failedTask);
     Map<String, String> params = getJobParams(appContext);
     params.put(AMParams.COUNTER_GROUP, "org.apache.hadoop.mapreduce.FileSystemCounter");