Author: vinodkv
Date: Tue Jan 21 18:59:18 2014
New Revision: 1560145

URL: http://svn.apache.org/r1560145
Log:
YARN-321. Forwarding YARN-321 branch to latest trunk.
Modified:
    hadoop/common/branches/YARN-321/hadoop-mapreduce-project/   (props changed)
    hadoop/common/branches/YARN-321/hadoop-mapreduce-project/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java
    hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java
    hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
    hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
    hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java

Propchange: hadoop/common/branches/YARN-321/hadoop-mapreduce-project/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project:r1559250-1560143

Modified: hadoop/common/branches/YARN-321/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-mapreduce-project/CHANGES.txt?rev=1560145&r1=1560144&r2=1560145&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/YARN-321/hadoop-mapreduce-project/CHANGES.txt Tue Jan 21 18:59:18 2014
@@ -146,6 +146,8 @@ Trunk (Unreleased)
     MAPREDUCE-5191. TestQueue#testQueue fails with timeout on Windows. (Ivan
     Mitic via hitesh)
 
+    MAPREDUCE-5717. Task pings are interpreted as task progress (jlowe)
+
 Release 2.4.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -201,6 +203,9 @@ Release 2.4.0 - UNRELEASED
     MAPREDUCE-5672. Provide optional RollingFileAppender for container log4j
     (syslog) (Gera Shegalov via jlowe)
 
+    MAPREDUCE-5725. Make explicit that TestNetworkedJob relies on the Capacity
+    Scheduler (Sandy Ryza)
+
   OPTIMIZATIONS
 
     MAPREDUCE-5484. YarnChild unnecessarily loads job conf twice (Sandy Ryza)
@@ -278,6 +283,8 @@ Release 2.4.0 - UNRELEASED
     MAPREDUCE-5724. JobHistoryServer does not start if HDFS is not running.
     (tucu)
 
+    MAPREDUCE-5729. mapred job -list throws NPE (kasha)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/YARN-321/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:r1559250-1560143

Modified: hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java?rev=1560145&r1=1560144&r2=1560145&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java Tue Jan 21 18:59:18 2014
@@ -361,7 +361,6 @@ public class TaskAttemptListenerImpl ext
     if (taskStatus == null) {
       //We are using statusUpdate only as a simple ping
       LOG.info("Ping from " + taskAttemptID.toString());
-      taskHeartbeatHandler.progressing(yarnAttemptID);
       return feedback;
     }
 
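For context on MAPREDUCE-5717: after the hunk above, a statusUpdate() call with a null TaskStatus is treated purely as a liveness ping and no longer refreshes the attempt's progress timestamp, so an attempt that only pings can still be timed out. A minimal standalone sketch of those semantics (plain Java, not the committed TaskHeartbeatHandler; the class and method names here are illustrative only):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    // Hypothetical sketch: only register() and progressing() refresh the
    // per-attempt timestamp; a bare ping calls neither, so the timestamp
    // keeps ageing and the attempt can eventually be declared timed out.
    public class ProgressTimeoutSketch {
      private final Map<String, Long> lastProgress = new ConcurrentHashMap<>();

      public void register(String attemptId) {        // at task launch
        lastProgress.put(attemptId, System.currentTimeMillis());
      }

      public void progressing(String attemptId) {     // on a real status update
        lastProgress.put(attemptId, System.currentTimeMillis());
      }

      public boolean hasTimedOut(String attemptId, long timeoutMs) {
        Long last = lastProgress.get(attemptId);
        return last != null && System.currentTimeMillis() - last > timeoutMs;
      }
    }

The new test in TestTaskAttemptListenerImpl below verifies exactly this split: a ping must not trigger progressing(), while a real MapTaskStatus must.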
Modified: hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java?rev=1560145&r1=1560144&r2=1560145&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java Tue Jan 21 18:59:18 2014
@@ -381,4 +381,50 @@ public class TestTaskAttemptListenerImpl
 
   }
 
+  @SuppressWarnings("rawtypes")
+  @Test
+  public void testStatusUpdateProgress()
+      throws IOException, InterruptedException {
+    AppContext appCtx = mock(AppContext.class);
+    JobTokenSecretManager secret = mock(JobTokenSecretManager.class);
+    RMHeartbeatHandler rmHeartbeatHandler =
+        mock(RMHeartbeatHandler.class);
+    TaskHeartbeatHandler hbHandler = mock(TaskHeartbeatHandler.class);
+    Dispatcher dispatcher = mock(Dispatcher.class);
+    EventHandler ea = mock(EventHandler.class);
+    when(dispatcher.getEventHandler()).thenReturn(ea);
+
+    when(appCtx.getEventHandler()).thenReturn(ea);
+    CheckpointAMPreemptionPolicy policy = new CheckpointAMPreemptionPolicy();
+    policy.init(appCtx);
+    MockTaskAttemptListenerImpl listener =
+        new MockTaskAttemptListenerImpl(appCtx, secret,
+            rmHeartbeatHandler, hbHandler, policy);
+    Configuration conf = new Configuration();
+    listener.init(conf);
+    listener.start();
+    JVMId id = new JVMId("foo",1, true, 1);
+    WrappedJvmID wid = new WrappedJvmID(id.getJobId(), id.isMap, id.getId());
+
+    TaskAttemptID attemptID = new TaskAttemptID("1", 1, TaskType.MAP, 1, 1);
+    TaskAttemptId attemptId = TypeConverter.toYarn(attemptID);
+    Task task = mock(Task.class);
+    listener.registerPendingTask(task, wid);
+    listener.registerLaunchedTask(attemptId, wid);
+    verify(hbHandler).register(attemptId);
+
+    // make sure a ping doesn't report progress
+    AMFeedback feedback = listener.statusUpdate(attemptID, null);
+    assertTrue(feedback.getTaskFound());
+    verify(hbHandler, never()).progressing(eq(attemptId));
+
+    // make sure a status update does report progress
+    MapTaskStatus mockStatus = new MapTaskStatus(attemptID, 0.0f, 1,
+        TaskStatus.State.RUNNING, "", "RUNNING", "", TaskStatus.Phase.MAP,
+        new Counters());
+    feedback = listener.statusUpdate(attemptID, mockStatus);
+    assertTrue(feedback.getTaskFound());
+    verify(hbHandler).progressing(eq(attemptId));
+    listener.close();
+  }
 }

Modified: hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java?rev=1560145&r1=1560144&r2=1560145&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java Tue Jan 21 18:59:18 2014
@@ -43,6 +43,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
 import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.NodeReport;
 import org.apache.hadoop.yarn.api.records.QueueACL;
@@ -445,11 +446,18 @@ public class TypeConverter {
     jobStatus.setStartTime(application.getStartTime());
     jobStatus.setFinishTime(application.getFinishTime());
     jobStatus.setFailureInfo(application.getDiagnostics());
-    jobStatus.setNeededMem(application.getApplicationResourceUsageReport().getNeededResources().getMemory());
-    jobStatus.setNumReservedSlots(application.getApplicationResourceUsageReport().getNumReservedContainers());
-    jobStatus.setNumUsedSlots(application.getApplicationResourceUsageReport().getNumUsedContainers());
-    jobStatus.setReservedMem(application.getApplicationResourceUsageReport().getReservedResources().getMemory());
-    jobStatus.setUsedMem(application.getApplicationResourceUsageReport().getUsedResources().getMemory());
+    ApplicationResourceUsageReport resourceUsageReport =
+        application.getApplicationResourceUsageReport();
+    if (resourceUsageReport != null) {
+      jobStatus.setNeededMem(
+          resourceUsageReport.getNeededResources().getMemory());
+      jobStatus.setNumReservedSlots(
+          resourceUsageReport.getNumReservedContainers());
+      jobStatus.setNumUsedSlots(resourceUsageReport.getNumUsedContainers());
+      jobStatus.setReservedMem(
+          resourceUsageReport.getReservedResources().getMemory());
+      jobStatus.setUsedMem(resourceUsageReport.getUsedResources().getMemory());
+    }
     return jobStatus;
   }
 
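The guard above is what the MAPREDUCE-5729 entry refers to: the old code chained application.getApplicationResourceUsageReport().getXxx() five times, so an ApplicationReport without a usage report made mapred job -list fail with an NPE. A hypothetical, self-contained illustration of the fetch-once/check-once pattern (plain Java with stand-in types, not the Hadoop records):

    // Hypothetical sketch: chaining through a getter that may return null
    // throws NullPointerException; fetching once and checking lets the
    // caller fall back to defaults instead.
    public class NullGuardSketch {
      static class Usage { int neededMb = 2048; }
      static class Report {
        private final Usage usage;                  // may be null
        Report(Usage usage) { this.usage = usage; }
        Usage getUsage() { return usage; }
      }

      public static void main(String[] args) {
        Report withoutUsage = new Report(null);
        // Old style: withoutUsage.getUsage().neededMb would throw right here.
        // New style, mirroring the committed hunk:
        Usage usage = withoutUsage.getUsage();
        int neededMb = (usage != null) ? usage.neededMb : 0;
        System.out.println("needed memory (MB): " + neededMb);   // prints 0
      }
    }

The TestTypeConverter change below exercises the same path against the real converter by calling TypeConverter.fromYarn() on a mocked ApplicationReport before any usage report has been stubbed.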
Modified: hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java?rev=1560145&r1=1560144&r2=1560145&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java Tue Jan 21 18:59:18 2014
@@ -23,8 +23,6 @@ import static org.mockito.Mockito.when;
 import java.util.ArrayList;
 import java.util.List;
 
-import junit.framework.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.JobStatus.State;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -40,6 +38,7 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.util.Records;
+import org.junit.Assert;
 import org.junit.Test;
 import org.mockito.Mockito;
 
@@ -112,6 +111,14 @@ public class TestTypeConverter {
     when(mockReport.getUser()).thenReturn("dummy-user");
     when(mockReport.getQueue()).thenReturn("dummy-queue");
     String jobFile = "dummy-path/job.xml";
+
+    try {
+      JobStatus status = TypeConverter.fromYarn(mockReport, jobFile);
+    } catch (NullPointerException npe) {
+      Assert.fail("Type converstion from YARN fails for jobs without " +
+          "ApplicationUsageReport");
+    }
+
     ApplicationResourceUsageReport appUsageRpt = Records
         .newRecord(ApplicationResourceUsageReport.class);
     Resource r = Records.newRecord(Resource.class);

Modified: hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java?rev=1560145&r1=1560144&r2=1560145&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java Tue Jan 21 18:59:18 2014
@@ -45,7 +45,9 @@ import org.apache.hadoop.mapred.lib.Iden
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.mapreduce.Cluster.JobTrackerStatus;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
 import org.junit.Test;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
@@ -76,8 +78,7 @@ public class TestNetworkedJob {
     FileSystem fileSys = null;
 
     try {
-      mr = MiniMRClientClusterFactory.create(this.getClass(), 2,
-          new Configuration());
+      mr = createMiniClusterWithCapacityScheduler();
 
       JobConf job = new JobConf(mr.getConfig());
 
@@ -129,8 +130,7 @@ public class TestNetworkedJob {
     FileSystem fileSys = null;
 
     try {
-      Configuration conf = new Configuration();
-      mr = MiniMRClientClusterFactory.create(this.getClass(), 2, conf);
+      mr = createMiniClusterWithCapacityScheduler();
 
       JobConf job = new JobConf(mr.getConfig());
 
@@ -315,8 +315,7 @@ public class TestNetworkedJob {
     FileSystem fileSys = null;
     PrintStream oldOut = System.out;
     try {
-      Configuration conf = new Configuration();
-      mr = MiniMRClientClusterFactory.create(this.getClass(), 2, conf);
+      mr = createMiniClusterWithCapacityScheduler();
 
       JobConf job = new JobConf(mr.getConfig());
 
@@ -392,4 +391,13 @@ public class TestNetworkedJob {
       }
     }
   }
+
+  private MiniMRClientCluster createMiniClusterWithCapacityScheduler()
+      throws IOException {
+    Configuration conf = new Configuration();
+    // Expected queue names depending on Capacity Scheduler queue naming
+    conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
+        CapacityScheduler.class);
+    return MiniMRClientClusterFactory.create(this.getClass(), 2, conf);
+  }
 }
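The helper added above (MAPREDUCE-5725) pins the scheduler for every mini cluster the test starts, because the queue names TestNetworkedJob asserts follow Capacity Scheduler naming. A hedged sketch of an equivalent helper that sets the property by name instead of through setClass (assumption: YarnConfiguration.RM_SCHEDULER resolves to "yarn.resourcemanager.scheduler.class"; this is not part of the commit):

    // Hypothetical alternative inside the same test class.
    private MiniMRClientCluster createMiniClusterPinnedByName()
        throws IOException {
      Configuration conf = new Configuration();
      conf.set("yarn.resourcemanager.scheduler.class",
          "org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity."
              + "CapacityScheduler");
      return MiniMRClientClusterFactory.create(this.getClass(), 2, conf);
    }

Either form keeps the test independent of whatever scheduler the surrounding yarn-site.xml or distribution default would otherwise select.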