Author: jlowe
Date: Thu Jun 13 21:35:34 2013
New Revision: 1492877

URL: http://svn.apache.org/r1492877
Log:
MAPREDUCE-4019. -list-attempt-ids is not working. Contributed by Ashwin Shankar, Devaraj K, and B Anil Kumar
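In short, the patch makes the -list-attempt-ids sub-command validate its <task-type> argument before querying the cluster: only MAP and REDUCE are accepted, the check is case-insensitive (so "map" works as well as "MAP"), and an unsupported type now prints an error plus the usage text and returns -1. It also introduces a createCluster() hook in CLI so the new TestCLI can substitute a mocked Cluster. A minimal standalone sketch of the validation idea follows; the class and method names here are illustrative, not part of the patch:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Sketch of the argument check the patch adds to CLI.run():
// -list-attempt-ids only supports MAP and REDUCE task types, and the
// comparison is case-insensitive so "map" and "MAP" are both accepted.
public class TaskTypeCheckSketch {
  private static final Set<String> TASK_TYPES =
      new HashSet<String>(Arrays.asList("MAP", "REDUCE"));

  static boolean isValidTaskType(String taskType) {
    return taskType != null && TASK_TYPES.contains(taskType.toUpperCase());
  }

  public static void main(String[] args) {
    System.out.println(isValidTaskType("map"));        // true
    System.out.println(isValidTaskType("REDUCE"));     // true
    System.out.println(isValidTaskType("JOB_SETUP"));  // false: CLI prints usage and returns -1
  }
}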
Added:
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/TestCLI.java
Modified:
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java

Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt?rev=1492877&r1=1492876&r2=1492877&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/CHANGES.txt Thu Jun 13 21:35:34 2013
@@ -401,6 +401,9 @@ Release 2.1.0-beta - UNRELEASED
     MAPREDUCE-5259. TestTaskLog fails on Windows because of path separators
     missmatch. (Ivan Mitic via cnauroth)
 
+    MAPREDUCE-4019. -list-attempt-ids is not working (Ashwin Shankar,
+    Devaraj K, and B Anil Kumar via jlowe)
+
   BREAKDOWN OF HADOOP-8562 SUBTASKS
 
     MAPREDUCE-4739. Some MapReduce tests fail to find winutils.
@@ -1028,6 +1031,9 @@ Release 0.23.9 - UNRELEASED
     MAPREDUCE-5315. DistCp reports success even on failure. (mithun and jlowe
     via daryn)
 
+    MAPREDUCE-4019. -list-attempt-ids is not working (Ashwin Shankar,
+    Devaraj K, and B Anil Kumar via jlowe)
+
 Release 0.23.8 - 2013-06-05
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java?rev=1492877&r1=1492876&r2=1492877&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java (original)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java Thu Jun 13 21:35:34 2013
@@ -21,8 +21,12 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
+import java.util.Set;
+import java.util.HashSet;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -62,6 +66,8 @@ import com.google.common.base.Charsets;
 public class CLI extends Configured implements Tool {
   private static final Log LOG = LogFactory.getLog(CLI.class);
   protected Cluster cluster;
+  private static final Set<String> taskTypes = new HashSet<String>(
+      Arrays.asList("MAP", "REDUCE"));
 
   public CLI() {
   }
@@ -214,6 +220,11 @@ public class CLI extends Configured impl
       taskType = argv[2];
       taskState = argv[3];
       displayTasks = true;
+      if (!taskTypes.contains(taskType.toUpperCase())) {
+        System.out.println("Error: Invalid task-type: "+taskType);
+        displayUsage(cmd);
+        return exitCode;
+      }
     } else if ("-logs".equals(cmd)) {
       if (argv.length == 2 || argv.length ==3) {
         logs = true;
@@ -233,7 +244,7 @@ public class CLI extends Configured impl
     }
 
     // initialize cluster
-    cluster = new Cluster(getConf());
+    cluster = createCluster();
 
     // Submit the request
     try {
@@ -366,6 +377,10 @@ public class CLI extends Configured impl
     return exitCode;
   }
 
+  Cluster createCluster() throws IOException {
+    return new Cluster(getConf());
+  }
+
   private String getJobPriorityNames() {
     StringBuffer sb = new StringBuffer();
     for (JobPriority p : JobPriority.values()) {
@@ -374,12 +389,8 @@ public class CLI extends Configured impl
     return sb.substring(0, sb.length()-1);
   }
 
-  private String getTaskTypess() {
-    StringBuffer sb = new StringBuffer();
-    for (TaskType t : TaskType.values()) {
-      sb.append(t.name()).append(" ");
-    }
-    return sb.substring(0, sb.length()-1);
+  private String getTaskTypes() {
+    return StringUtils.join(taskTypes, " ");
   }
 
   /**
@@ -388,8 +399,8 @@ public class CLI extends Configured impl
   private void displayUsage(String cmd) {
     String prefix = "Usage: CLI ";
     String jobPriorityValues = getJobPriorityNames();
-    String taskTypes = getTaskTypess();
     String taskStates = "running, completed";
+
     if ("-submit".equals(cmd)) {
       System.err.println(prefix + "[" + cmd + " <job-file>]");
     } else if ("-status".equals(cmd) || "-kill".equals(cmd)) {
@@ -417,7 +428,7 @@ public class CLI extends Configured impl
     } else if ("-list-attempt-ids".equals(cmd)) {
       System.err.println(prefix + "[" + cmd +
         " <job-id> <task-type> <task-state>]. " +
-        "Valid values for <task-type> are " + taskTypes + ". " +
+        "Valid values for <task-type> are " + getTaskTypes() + ". " +
         "Valid values for <task-state> are " + taskStates);
     } else if ("-logs".equals(cmd)) {
       System.err.println(prefix + "[" + cmd +
@@ -438,7 +449,7 @@ public class CLI extends Configured impl
       System.err.printf("\t[-list-blacklisted-trackers]%n");
       System.err.println("\t[-list-attempt-ids <job-id> <task-type> " +
         "<task-state>]. " +
-        "Valid values for <task-type> are " + taskTypes + ". " +
+        "Valid values for <task-type> are " + getTaskTypes() + ". " +
         "Valid values for <task-state> are " + taskStates);
       System.err.printf("\t[-kill-task <task-attempt-id>]%n");
       System.err.printf("\t[-fail-task <task-attempt-id>]%n");
@@ -555,7 +566,7 @@ public class CLI extends Configured impl
    */
   protected void displayTasks(Job job, String type, String state)
   throws IOException, InterruptedException {
-    TaskReport[] reports = job.getTaskReports(TaskType.valueOf(type));
+    TaskReport[] reports = job.getTaskReports(TaskType.valueOf(type.toUpperCase()));
     for (TaskReport report : reports) {
       TIPStatus status = report.getCurrentStatus();
       if ((state.equals("pending") && status ==TIPStatus.PENDING) ||

Added: hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/TestCLI.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/TestCLI.java?rev=1492877&view=auto
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/TestCLI.java (added)
+++ hadoop/common/branches/branch-2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/TestCLI.java Thu Jun 13 21:35:34 2013
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.tools;
+
+import static org.junit.Assert.*;
+
+import org.apache.hadoop.mapreduce.Cluster;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TaskReport;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.junit.Test;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.doReturn;
+
+public class TestCLI {
+  private static String jobIdStr = "job_1015298225799_0015";
+
+  @Test
+  public void testListAttemptIdsWithValidInput() throws Exception {
+    JobID jobId = JobID.forName(jobIdStr);
+    Cluster mockCluster = mock(Cluster.class);
+    Job job = mock(Job.class);
+    CLI cli = spy(new CLI());
+
+    doReturn(mockCluster).when(cli).createCluster();
+    when(job.getTaskReports(TaskType.MAP)).thenReturn(
+        getTaskReports(jobId, TaskType.MAP));
+    when(job.getTaskReports(TaskType.REDUCE)).thenReturn(
+        getTaskReports(jobId, TaskType.REDUCE));
+    when(mockCluster.getJob(jobId)).thenReturn(job);
+
+    int retCode_MAP = cli.run(new String[] { "-list-attempt-ids", jobIdStr,
+        "MAP", "running" });
+    // testing case insensitive behavior
+    int retCode_map = cli.run(new String[] { "-list-attempt-ids", jobIdStr,
+        "map", "running" });
+
+    int retCode_REDUCE = cli.run(new String[] { "-list-attempt-ids", jobIdStr,
+        "REDUCE", "running" });
+
+    assertEquals("MAP is a valid input,exit code should be 0", 0, retCode_MAP);
+    assertEquals("map is a valid input,exit code should be 0", 0, retCode_map);
+    assertEquals("REDUCE is a valid input,exit code should be 0", 0,
+        retCode_REDUCE);
+
+    verify(job, times(2)).getTaskReports(TaskType.MAP);
+    verify(job, times(1)).getTaskReports(TaskType.REDUCE);
+  }
+
+  @Test
+  public void testListAttemptIdsWithInvalidInputs() throws Exception {
+    JobID jobId = JobID.forName(jobIdStr);
+    Cluster mockCluster = mock(Cluster.class);
+    Job job = mock(Job.class);
+    CLI cli = spy(new CLI());
+
+    doReturn(mockCluster).when(cli).createCluster();
+    when(mockCluster.getJob(jobId)).thenReturn(job);
+
+    int retCode_JOB_SETUP = cli.run(new String[] { "-list-attempt-ids",
+        jobIdStr, "JOB_SETUP", "running" });
+    int retCode_JOB_CLEANUP = cli.run(new String[] { "-list-attempt-ids",
+        jobIdStr, "JOB_CLEANUP", "running" });
+
+    assertEquals("JOB_SETUP is a invalid input,exit code should be -1", -1,
+        retCode_JOB_SETUP);
+    assertEquals("JOB_CLEANUP is a invalid input,exit code should be -1", -1,
+        retCode_JOB_CLEANUP);
+
+  }
+
+  private TaskReport[] getTaskReports(JobID jobId, TaskType type) {
+    return new TaskReport[] { new TaskReport(), new TaskReport() };
+  }
+}
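A note on the design choice above: rather than constructing the Cluster inline in run(), CLI now obtains it through the overridable createCluster() method, which is the seam TestCLI uses (via Mockito's spy/doReturn) to inject a mocked Cluster and exercise run() without a real cluster. A generic, hypothetical sketch of that factory-method-seam pattern, assuming only Mockito on the classpath; the class names here are illustrative, not part of the patch:

import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;

// Hypothetical illustration of the factory-method seam used by TestCLI:
// production code calls an overridable factory method, and the test spies
// the object and stubs that method to return a mock collaborator.
class Backend {            // stands in for org.apache.hadoop.mapreduce.Cluster
}

class SeamTool {           // stands in for org.apache.hadoop.mapreduce.tools.CLI
  // Overridable seam, analogous to CLI.createCluster() in this patch.
  protected Backend createBackend() {
    return new Backend();  // the "real" collaborator in production
  }

  public int run() {
    Backend backend = createBackend();  // tests can redirect this call
    return backend != null ? 0 : -1;
  }
}

public class SeamSketch {
  public static void main(String[] args) {
    SeamTool tool = spy(new SeamTool());
    doReturn(mock(Backend.class)).when(tool).createBackend();
    System.out.println(tool.run());  // prints 0, using the mocked Backend
  }
}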