Author: szetszwo
Date: Mon Mar 18 11:45:07 2013
New Revision: 1457716

URL: http://svn.apache.org/r1457716
Log:
Merge r1455389 through r1457712 from trunk.
Modified:
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/conf/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/CountersBlock.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestFetcher.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/MockHistoryJobs.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
    hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java

Propchange: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project:r1455389-1457712

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt Mon Mar 18 11:45:07 2013
@@ -230,6 +230,11 @@ Release 2.0.5-beta - UNRELEASED
     appropriately used and that on-disk segments are correctly sorted on
     file-size. (Anty Rao and Ravi Prakash via acmurthy)
 
+    MAPREDUCE-4571. TestHsWebServicesJobs fails on jdk7. (tgraves via tucu)
+
+    MAPREDUCE-4716. TestHsWebServicesJobsQuery.testJobsQueryStateInvalid
+    fails with jdk7. (tgraves via tucu)
+
 Release 2.0.4-alpha - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -807,6 +812,12 @@ Release 0.23.7 - UNRELEASED
     MAPREDUCE-5023. History Server Web Services missing Job Counters
     (Ravi Prakash via tgraves)
 
+    MAPREDUCE-5060. Fetch failures that time out only count against the first
+    map task (Robert Joseph Evans via jlowe)
+
+    MAPREDUCE-5042. Reducer unable to fetch for a map task that was recovered
+    (Jason Lowe via bobby)
+
 Release 0.23.6 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:r1455389-1457712

Propchange: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/conf/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/conf:r1455389-1457712

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java Mon Mar 18 11:45:07 2013
@@ -269,9 +269,17 @@ class YarnChild {
     job.setBoolean("ipc.client.tcpnodelay", true);
     job.setClass(MRConfig.TASK_LOCAL_OUTPUT_CLASS, YarnOutputFiles.class,
         MapOutputFile.class);
-    // set the jobTokenFile into task
+    // set the jobToken and shuffle secrets into task
     task.setJobTokenSecret(
         JobTokenSecretManager.createSecretKey(jt.getPassword()));
+    byte[] shuffleSecret = TokenCache.getShuffleSecretKey(credentials);
+    if (shuffleSecret == null) {
+      LOG.warn("Shuffle secret missing from task credentials."
+          + " Using job token secret as shuffle secret.");
+      shuffleSecret = jt.getPassword();
+    }
+    task.setShuffleSecret(
+        JobTokenSecretManager.createSecretKey(shuffleSecret));
 
     // setup the child's MRConfig.LOCAL_DIR.
     configureLocalDirs(task, job);
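Editor's note: the YarnChild hunk above derives the task's shuffle key from the
job credentials, falling back to the job token password so jobs submitted by
older clients can still shuffle. A minimal sketch of that fallback, assuming
raw key bytes are already in hand; the helper class is hypothetical, and
JobTokenSecretManager.createSecretKey in the hunk does essentially this
SecretKeySpec wrapping:

    import javax.crypto.SecretKey;
    import javax.crypto.spec.SecretKeySpec;

    final class ShuffleSecretFallback {
      // Prefer the client-provided shuffle secret; otherwise reuse the
      // job token password, exactly as the YarnChild hunk does.
      static SecretKey chooseShuffleSecret(byte[] shuffleSecretOrNull,
          byte[] jobTokenPassword) {
        byte[] raw = (shuffleSecretOrNull != null)
            ? shuffleSecretOrNull : jobTokenPassword;
        return new SecretKeySpec(raw, "HmacSHA1");
      }
    }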
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java Mon Mar 18 11:45:07 2013
@@ -55,6 +55,7 @@ import org.apache.hadoop.mapreduce.jobhi
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
+import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
@@ -339,8 +340,15 @@ public class MRAppMaster extends Composi
     boolean recoveryEnabled = conf.getBoolean(
         MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, true);
     boolean recoverySupportedByCommitter = committer.isRecoverySupported();
+
+    // If a shuffle secret was not provided by the job client then this app
+    // attempt will generate one.  However that disables recovery if there
+    // are reducers as the shuffle secret would be app attempt specific.
+    boolean shuffleKeyValidForRecovery = (numReduceTasks > 0 &&
+        TokenCache.getShuffleSecretKey(fsTokens) != null);
+
     if (recoveryEnabled && recoverySupportedByCommitter
-        && appAttemptID.getAttemptId() > 1) {
+        && shuffleKeyValidForRecovery && appAttemptID.getAttemptId() > 1) {
       LOG.info("Recovery is enabled. "
           + "Will try to recover from previous life on best effort basis.");
       recoveryServ = createRecoveryService(context);
@@ -351,7 +359,8 @@ public class MRAppMaster extends Composi
     } else {
       LOG.info("Not starting RecoveryService: recoveryEnabled: "
           + recoveryEnabled + " recoverySupportedByCommitter: "
-          + recoverySupportedByCommitter + " ApplicationAttemptID: "
+          + recoverySupportedByCommitter + " shuffleKeyValidForRecovery: "
+          + shuffleKeyValidForRecovery + " ApplicationAttemptID: "
           + appAttemptID.getAttemptId());
       dispatcher = createDispatcher();
       addIfService(dispatcher);
@@ -471,7 +480,11 @@ public class MRAppMaster extends Composi
   protected FileSystem getFileSystem(Configuration conf) throws IOException {
     return FileSystem.get(conf);
   }
-
+
+  protected Credentials getCredentials() {
+    return fsTokens;
+  }
+
   /**
    * clean up staging directories for the job.
    * @throws IOException
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java Mon Mar 18 11:45:07 2013
@@ -1350,13 +1350,13 @@ public class JobImpl implements org.apac
       LOG.info("Adding job token for " + oldJobIDString
           + " to jobTokenSecretManager");
 
-      // Upload the jobTokens onto the remote FS so that ContainerManager can
-      // localize it to be used by the Containers(tasks)
-      Credentials tokenStorage = new Credentials();
-      TokenCache.setJobToken(job.jobToken, tokenStorage);
-
-      if (UserGroupInformation.isSecurityEnabled()) {
-        tokenStorage.addAll(job.fsTokens);
+      // If the job client did not setup the shuffle secret then reuse
+      // the job token secret for the shuffle.
+      if (TokenCache.getShuffleSecretKey(job.fsTokens) == null) {
+        LOG.warn("Shuffle secret key missing from job credentials."
+            + " Using job token secret as shuffle secret.");
+        TokenCache.setShuffleSecretKey(job.jobToken.getPassword(),
+            job.fsTokens);
       }
     }

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java Mon Mar 18 11:45:07 2013
@@ -702,10 +702,21 @@ public abstract class TaskAttemptImpl im
         ByteBuffer.wrap(containerTokens_dob.getData(), 0,
             containerTokens_dob.getLength());
 
-    // Add shuffle token
+    // Add shuffle secret key
+    // The secret key is converted to a JobToken to preserve backwards
+    // compatibility with an older ShuffleHandler running on an NM.
     LOG.info("Putting shuffle token in serviceData");
+    byte[] shuffleSecret = TokenCache.getShuffleSecretKey(credentials);
+    if (shuffleSecret == null) {
+      LOG.warn("Cannot locate shuffle secret in credentials."
+          + " Using job token as shuffle secret.");
+      shuffleSecret = jobToken.getPassword();
+    }
+    Token<JobTokenIdentifier> shuffleToken = new Token<JobTokenIdentifier>(
+        jobToken.getIdentifier(), shuffleSecret, jobToken.getKind(),
+        jobToken.getService());
     serviceData.put(ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID,
-        ShuffleHandler.serializeServiceData(jobToken));
+        ShuffleHandler.serializeServiceData(shuffleToken));
 
     Apps.addToEnvironment(
         environment,
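Editor's note: the TaskAttemptImpl hunk keeps the wire format understood by
older ShuffleHandlers by shipping the new shuffle secret as the password of a
token that otherwise mirrors the job token. A sketch of that wrapping under
assumed inputs; the class and the kind/service values here are placeholders,
not the real MapReduce constants:

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.token.Token;
    import org.apache.hadoop.security.token.TokenIdentifier;

    final class ShuffleTokenWrapper {
      // Reuse the job token's identity fields but swap in the shuffle
      // secret as the password, so an old NM deserializes it unchanged.
      static Token<TokenIdentifier> wrap(byte[] jobTokenIdentifier,
          byte[] shuffleSecret) {
        return new Token<TokenIdentifier>(jobTokenIdentifier, shuffleSecret,
            new Text("placeholder-kind"), new Text("placeholder-service"));
      }
    }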
+ + " Using job token as shuffle secret."); + shuffleSecret = jobToken.getPassword(); + } + Token<JobTokenIdentifier> shuffleToken = new Token<JobTokenIdentifier>( + jobToken.getIdentifier(), shuffleSecret, jobToken.getKind(), + jobToken.getService()); serviceData.put(ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID, - ShuffleHandler.serializeServiceData(jobToken)); + ShuffleHandler.serializeServiceData(shuffleToken)); Apps.addToEnvironment( environment, Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/CountersBlock.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/CountersBlock.java?rev=1457716&r1=1457715&r2=1457716&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/CountersBlock.java (original) +++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/CountersBlock.java Mon Mar 18 11:45:07 2013 @@ -111,7 +111,7 @@ public class CountersBlock extends HtmlB th().$title(g.getName()).$class("ui-state-default"). _(fixGroupDisplayName(g.getDisplayName()))._(). td().$class(C_TABLE). - table(".dt-counters"). + table(".dt-counters").$id(job.getID()+"."+g.getName()). thead(). tr().th(".name", "Name"); Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java?rev=1457716&r1=1457715&r2=1457716&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java (original) +++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java Mon Mar 18 11:45:07 2013 @@ -42,6 +42,7 @@ import org.apache.hadoop.mapreduce.TaskA import org.apache.hadoop.mapreduce.TypeConverter; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent; import org.apache.hadoop.mapreduce.jobhistory.NormalizedResourceEvent; +import org.apache.hadoop.mapreduce.security.TokenCache; import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager; import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo; import org.apache.hadoop.mapreduce.v2.api.records.JobId; @@ -144,6 +145,9 @@ public class MRApp extends MRAppMaster { @Override protected void downloadTokensAndSetupUGI(Configuration conf) { + // Fake a shuffle secret that normally is provided by the job client. 
+    String shuffleSecret = "fake-shuffle-secret";
+    TokenCache.setShuffleSecretKey(shuffleSecret.getBytes(), getCredentials());
   }
 
   private static ApplicationAttemptId getApplicationAttemptId(

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java Mon Mar 18 11:45:07 2013
@@ -900,6 +900,117 @@ public class TestRecovery {
 
   }
 
+  @Test(timeout=30000)
+  public void testRecoveryWithoutShuffleSecret() throws Exception {
+
+    int runCount = 0;
+    MRApp app = new MRAppNoShuffleSecret(2, 1, false,
+        this.getClass().getName(), true, ++runCount);
+    Configuration conf = new Configuration();
+    conf.setBoolean("mapred.mapper.new-api", true);
+    conf.setBoolean("mapred.reducer.new-api", true);
+    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
+    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
+    Job job = app.submit(conf);
+    app.waitForState(job, JobState.RUNNING);
+    //all maps would be running
+    Assert.assertEquals("No of tasks not correct",
+        3, job.getTasks().size());
+    Iterator<Task> it = job.getTasks().values().iterator();
+    Task mapTask1 = it.next();
+    Task mapTask2 = it.next();
+    Task reduceTask = it.next();
+
+    // all maps must be running
+    app.waitForState(mapTask1, TaskState.RUNNING);
+    app.waitForState(mapTask2, TaskState.RUNNING);
+
+    TaskAttempt task1Attempt = mapTask1.getAttempts().values().iterator().next();
+    TaskAttempt task2Attempt = mapTask2.getAttempts().values().iterator().next();
+
+    //before sending the TA_DONE, event make sure attempt has come to
+    //RUNNING state
+    app.waitForState(task1Attempt, TaskAttemptState.RUNNING);
+    app.waitForState(task2Attempt, TaskAttemptState.RUNNING);
+
+    // reduces must be in NEW state
+    Assert.assertEquals("Reduce Task state not correct",
+        TaskState.RUNNING, reduceTask.getReport().getTaskState());
+
+    //send the done signal to the 1st map attempt
+    app.getContext().getEventHandler().handle(
+        new TaskAttemptEvent(
+            task1Attempt.getID(),
+            TaskAttemptEventType.TA_DONE));
+
+    //wait for first map task to complete
+    app.waitForState(mapTask1, TaskState.SUCCEEDED);
+
+    //stop the app
+    app.stop();
+
+    //in recovery the 1st map should NOT be recovered from previous run
+    //since the shuffle secret was not provided with the job credentials
+    //and had to be rolled per app attempt
+    app = new MRAppNoShuffleSecret(2, 1, false,
+        this.getClass().getName(), false, ++runCount);
+    conf = new Configuration();
+    conf.setBoolean(MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, true);
+    conf.setBoolean("mapred.mapper.new-api", true);
+    conf.setBoolean("mapred.reducer.new-api", true);
+    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
+    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
+    job = app.submit(conf);
+    app.waitForState(job, JobState.RUNNING);
+    //all maps would be running
+    Assert.assertEquals("No of tasks not correct",
+        3, job.getTasks().size());
+    it = job.getTasks().values().iterator();
+    mapTask1 = it.next();
+    mapTask2 = it.next();
+    reduceTask = it.next();
+
+    app.waitForState(mapTask1, TaskState.RUNNING);
+    app.waitForState(mapTask2, TaskState.RUNNING);
+
+    task2Attempt = mapTask2.getAttempts().values().iterator().next();
+    //before sending the TA_DONE, event make sure attempt has come to
+    //RUNNING state
+    app.waitForState(task2Attempt, TaskAttemptState.RUNNING);
+
+    //send the done signal to the 2nd map task
+    app.getContext().getEventHandler().handle(
+        new TaskAttemptEvent(
+            mapTask2.getAttempts().values().iterator().next().getID(),
+            TaskAttemptEventType.TA_DONE));
+
+    //wait to get it completed
+    app.waitForState(mapTask2, TaskState.SUCCEEDED);
+
+    //verify first map task is still running
+    app.waitForState(mapTask1, TaskState.RUNNING);
+
+    //send the done signal to the 2nd map task
+    app.getContext().getEventHandler().handle(
+        new TaskAttemptEvent(
+            mapTask1.getAttempts().values().iterator().next().getID(),
+            TaskAttemptEventType.TA_DONE));
+
+    //wait to get it completed
+    app.waitForState(mapTask1, TaskState.SUCCEEDED);
+
+    //wait for reduce to be running before sending done
+    app.waitForState(reduceTask, TaskState.RUNNING);
+    //send the done signal to the reduce
+    app.getContext().getEventHandler().handle(
+        new TaskAttemptEvent(
+            reduceTask.getAttempts().values().iterator().next().getID(),
+            TaskAttemptEventType.TA_DONE));
+
+    app.waitForState(job, JobState.SUCCEEDED);
+    app.verifyCompleted();
+  }
+
   private void writeBadOutput(TaskAttempt attempt, Configuration conf)
   throws Exception {
   TaskAttemptContext tContext = new TaskAttemptContextImpl(conf,
@@ -1019,6 +1130,18 @@ public class TestRecovery {
     }
   }
 
+  static class MRAppNoShuffleSecret extends MRAppWithHistory {
+    public MRAppNoShuffleSecret(int maps, int reduces, boolean autoComplete,
+        String testName, boolean cleanOnStart, int startCount) {
+      super(maps, reduces, autoComplete, testName, cleanOnStart, startCount);
+    }
+
+    @Override
+    protected void downloadTokensAndSetupUGI(Configuration conf) {
+      // do NOT put a shuffle secret in the job credentials
+    }
+  }
+
   public static void main(String[] arg) throws Exception {
     TestRecovery test = new TestRecovery();
     test.testCrashed();

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java Mon Mar 18 11:45:07 2013
@@ -491,7 +491,7 @@ public class TestJobImpl {
     MRAppMetrics mrAppMetrics = MRAppMetrics.create();
     JobImpl job = new JobImpl(jobId, Records
         .newRecord(ApplicationAttemptId.class), conf, mock(EventHandler.class),
-        null, mock(JobTokenSecretManager.class), null, null, null,
+        null, new JobTokenSecretManager(), new Credentials(), null, null,
         mrAppMetrics, true, null, 0, null, null, null, null);
     InitTransition initTransition = getInitTransition(2);
     JobEvent mockJobEvent = mock(JobEvent.class);

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java Mon Mar 18 11:45:07 2013
@@ -185,6 +185,7 @@ abstract public class Task implements Wr
   private int numSlotsRequired;
   protected TaskUmbilicalProtocol umbilical;
   protected SecretKey tokenSecret;
+  protected SecretKey shuffleSecret;
   protected GcTimeUpdater gcUpdater;
 
   ////////////////////////////////////////////
@@ -261,7 +262,22 @@ abstract public class Task implements Wr
     return this.tokenSecret;
   }
-
+  /**
+   * Set the secret key used to authenticate the shuffle
+   * @param shuffleSecret the secret
+   */
+  public void setShuffleSecret(SecretKey shuffleSecret) {
+    this.shuffleSecret = shuffleSecret;
+  }
+
+  /**
+   * Get the secret key used to authenticate the shuffle
+   * @return the shuffle secret
+   */
+  public SecretKey getShuffleSecret() {
+    return this.shuffleSecret;
+  }
+
   /**
    * Get the index of this task within the job.
    * @return the integer part of the task id
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java Mon Mar 18 11:45:07 2013
@@ -23,11 +23,15 @@ import java.net.InetAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.UnknownHostException;
+import java.security.NoSuchAlgorithmException;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
 
+import javax.crypto.KeyGenerator;
+import javax.crypto.SecretKey;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -62,6 +66,8 @@ import com.google.common.base.Charsets;
 @InterfaceStability.Unstable
 class JobSubmitter {
   protected static final Log LOG = LogFactory.getLog(JobSubmitter.class);
+  private static final String SHUFFLE_KEYGEN_ALGORITHM = "HmacSHA1";
+  private static final int SHUFFLE_KEY_LENGTH = 64;
   private FileSystem jtFs;
   private ClientProtocol submitClient;
   private String submitHostName;
@@ -359,6 +365,20 @@ class JobSubmitter {
 
       populateTokenCache(conf, job.getCredentials());
 
+      // generate a secret to authenticate shuffle transfers
+      if (TokenCache.getShuffleSecretKey(job.getCredentials()) == null) {
+        KeyGenerator keyGen;
+        try {
+          keyGen = KeyGenerator.getInstance(SHUFFLE_KEYGEN_ALGORITHM);
+          keyGen.init(SHUFFLE_KEY_LENGTH);
+        } catch (NoSuchAlgorithmException e) {
+          throw new IOException("Error generating shuffle secret key", e);
+        }
+        SecretKey shuffleKey = keyGen.generateKey();
+        TokenCache.setShuffleSecretKey(shuffleKey.getEncoded(),
+            job.getCredentials());
+      }
+
       copyAndConfigureFiles(job, submitJobDir);
 
       Path submitJobFile = JobSubmissionFiles.getJobConfPath(submitJobDir);
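Editor's note: the JobSubmitter hunk above is where the per-job shuffle secret
is minted at submission time. A standalone, runnable sketch of the same
javax.crypto key-generation step; the algorithm and the 64-bit length mirror
SHUFFLE_KEYGEN_ALGORITHM and SHUFFLE_KEY_LENGTH in the hunk, while the class
name is illustrative:

    import java.security.NoSuchAlgorithmException;
    import javax.crypto.KeyGenerator;
    import javax.crypto.SecretKey;

    public class ShuffleKeyDemo {
      public static void main(String[] args) throws NoSuchAlgorithmException {
        // Same algorithm and key length (in bits) as the hunk above.
        KeyGenerator keyGen = KeyGenerator.getInstance("HmacSHA1");
        keyGen.init(64);
        SecretKey shuffleKey = keyGen.generateKey();
        // getEncoded() yields the raw bytes stored in the job Credentials.
        System.out.println("generated " + shuffleKey.getEncoded().length
            + "-byte shuffle key");
      }
    }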
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java Mon Mar 18 11:45:07 2013
@@ -154,7 +154,8 @@ public class TokenCache {
    */
   @InterfaceAudience.Private
   public static final String JOB_TOKENS_FILENAME = "mapreduce.job.jobTokenFile";
-  private static final Text JOB_TOKEN = new Text("ShuffleAndJobToken");
+  private static final Text JOB_TOKEN = new Text("JobToken");
+  private static final Text SHUFFLE_TOKEN = new Text("MapReduceShuffleToken");
 
   /**
    * load job token from a file
@@ -194,4 +195,14 @@ public class TokenCache {
   public static Token<JobTokenIdentifier> getJobToken(Credentials credentials) {
     return (Token<JobTokenIdentifier>) credentials.getToken(JOB_TOKEN);
   }
+
+  @InterfaceAudience.Private
+  public static void setShuffleSecretKey(byte[] key, Credentials credentials) {
+    credentials.addSecretKey(SHUFFLE_TOKEN, key);
+  }
+
+  @InterfaceAudience.Private
+  public static byte[] getShuffleSecretKey(Credentials credentials) {
+    return getSecretKey(credentials, SHUFFLE_TOKEN);
+  }
 }
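Editor's note: a short round-trip of the two TokenCache accessors added above,
built on the existing Credentials secret-key API; the class name and key bytes
are placeholders:

    import org.apache.hadoop.mapreduce.security.TokenCache;
    import org.apache.hadoop.security.Credentials;

    public class ShuffleSecretRoundTrip {
      public static void main(String[] args) {
        // The job client stores the shuffle secret in the job Credentials;
        // the AM and tasks read it back. A null result means the submitter
        // predates this patch, which triggers the job-token fallback seen
        // in the JobImpl, YarnChild, and TaskAttemptImpl hunks.
        Credentials creds = new Credentials();
        byte[] secret = {0x1, 0x2, 0x3, 0x4}; // placeholder key bytes
        TokenCache.setShuffleSecretKey(secret, creds);
        byte[] back = TokenCache.getShuffleSecretKey(creds);
        System.out.println(back == null ? "no shuffle secret"
            : back.length + "-byte shuffle secret");
      }
    }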
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java Mon Mar 18 11:45:07 2013
@@ -82,7 +82,7 @@ class Fetcher<K,V> extends Thread {
   private final int connectionTimeout;
   private final int readTimeout;
 
-  private final SecretKey jobTokenSecret;
+  private final SecretKey shuffleSecretKey;
 
   private volatile boolean stopped = false;
 
@@ -92,7 +92,7 @@ class Fetcher<K,V> extends Thread {
   public Fetcher(JobConf job, TaskAttemptID reduceId,
                  ShuffleScheduler<K,V> scheduler, MergeManager<K,V> merger,
                  Reporter reporter, ShuffleClientMetrics metrics,
-                 ExceptionReporter exceptionReporter, SecretKey jobTokenSecret) {
+                 ExceptionReporter exceptionReporter, SecretKey shuffleKey) {
     this.reporter = reporter;
     this.scheduler = scheduler;
     this.merger = merger;
@@ -100,7 +100,7 @@ class Fetcher<K,V> extends Thread {
     this.exceptionReporter = exceptionReporter;
     this.id = ++nextId;
     this.reduce = reduceId.getTaskID().getId();
-    this.jobTokenSecret = jobTokenSecret;
+    this.shuffleSecretKey = shuffleKey;
     ioErrs = reporter.getCounter(SHUFFLE_ERR_GRP_NAME,
         ShuffleErrors.IO_ERROR.toString());
     wrongLengthErrs = reporter.getCounter(SHUFFLE_ERR_GRP_NAME,
@@ -221,7 +221,6 @@ class Fetcher<K,V> extends Thread {
 
     // Construct the url and connect
     DataInputStream input;
-    boolean connectSucceeded = false;
 
     try {
       URL url = getMapOutputURL(host, maps);
@@ -229,7 +228,8 @@ class Fetcher<K,V> extends Thread {
 
       // generate hash of the url
       String msgToEncode = SecureShuffleUtils.buildMsgFrom(url);
-      String encHash = SecureShuffleUtils.hashFromString(msgToEncode, jobTokenSecret);
+      String encHash = SecureShuffleUtils.hashFromString(msgToEncode,
+          shuffleSecretKey);
 
       // put url hash into http header
       connection.addRequestProperty(
@@ -237,7 +237,6 @@ class Fetcher<K,V> extends Thread {
       // set the read timeout
       connection.setReadTimeout(readTimeout);
       connect(connection, connectionTimeout);
-      connectSucceeded = true;
       input = new DataInputStream(connection.getInputStream());
 
       // Validate response code
@@ -255,7 +254,7 @@ class Fetcher<K,V> extends Thread {
       }
       LOG.debug("url="+msgToEncode+";encHash="+encHash+";replyHash="+replyHash);
       // verify that replyHash is HMac of encHash
-      SecureShuffleUtils.verifyReply(replyHash, encHash, jobTokenSecret);
+      SecureShuffleUtils.verifyReply(replyHash, encHash, shuffleSecretKey);
       LOG.info("for url="+msgToEncode+" sent hash and received reply");
     } catch (IOException ie) {
       boolean connectExcpt = ie instanceof ConnectException;
@@ -265,18 +264,10 @@ class Fetcher<K,V> extends Thread {
 
       // If connect did not succeed, just mark all the maps as failed,
       // indirectly penalizing the host
-      if (!connectSucceeded) {
-        for(TaskAttemptID left: remaining) {
-          scheduler.copyFailed(left, host, connectSucceeded, connectExcpt);
-        }
-      } else {
-        // If we got a read error at this stage, it implies there was a problem
-        // with the first map, typically lost map. So, penalize only that map
-        // and add the rest
-        TaskAttemptID firstMap = maps.get(0);
-        scheduler.copyFailed(firstMap, host, connectSucceeded, connectExcpt);
+      for(TaskAttemptID left: remaining) {
+        scheduler.copyFailed(left, host, false, connectExcpt);
       }
-
+
       // Add back all the remaining maps, WITHOUT marking them as failed
       for(TaskAttemptID left: remaining) {
         scheduler.putBackKnownMapOutput(host, left);

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java Mon Mar 18 11:45:07 2013
@@ -108,7 +108,7 @@ public class Shuffle<K, V> implements Sh
     for (int i=0; i < numFetchers; ++i) {
       fetchers[i] = new Fetcher<K,V>(jobConf, reduceId, scheduler, merger,
                                      reporter, metrics, this,
-                                     reduceTask.getJobTokenSecret());
+                                     reduceTask.getShuffleSecret());
       fetchers[i].start();
     }

Propchange: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:r1455389-1457712
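Editor's note: the Fetcher hunks swap the job token secret for the dedicated
shuffle key in the HMAC handshake with the shuffle server. A self-contained
sketch of that exchange using plain javax.crypto and java.util.Base64
(SecureShuffleUtils wraps essentially this, though with URL-safe encoding);
the URL, key bytes, and class name are illustrative:

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;
    import javax.crypto.Mac;
    import javax.crypto.spec.SecretKeySpec;

    public class ShuffleHmacDemo {
      public static void main(String[] args) throws Exception {
        SecretKeySpec key = new SecretKeySpec(
            "illustrative-shuffle-key".getBytes(StandardCharsets.UTF_8),
            "HmacSHA1");
        Mac mac = Mac.getInstance("HmacSHA1");
        mac.init(key);

        // Reducer side: hash the map-output URL and send it as a header.
        String urlMsg = "http://host:8080/mapOutput?job=j&map=m&reduce=0";
        String encHash = Base64.getEncoder().encodeToString(
            mac.doFinal(urlMsg.getBytes(StandardCharsets.UTF_8)));

        // Server side: reply with an HMAC of the request hash, proving it
        // holds the same secret; the reducer verifies before reading data.
        String replyHash = Base64.getEncoder().encodeToString(
            mac.doFinal(encHash.getBytes(StandardCharsets.UTF_8)));
        System.out.println("request=" + encHash + " reply=" + replyHash);
      }
    }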
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestFetcher.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestFetcher.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestFetcher.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestFetcher.java Mon Mar 18 11:45:07 2013
@@ -26,6 +26,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.net.HttpURLConnection;
+import java.net.SocketTimeoutException;
 import java.net.URL;
 import java.util.ArrayList;
 
@@ -71,6 +72,54 @@ public class TestFetcher {
   }
 
   @SuppressWarnings("unchecked")
+  @Test(timeout=30000)
+  public void testCopyFromHostConnectionTimeout() throws Exception {
+    LOG.info("testCopyFromHostConnectionTimeout");
+    JobConf job = new JobConf();
+    TaskAttemptID id = TaskAttemptID.forName("attempt_0_1_r_1_1");
+    ShuffleScheduler<Text, Text> ss = mock(ShuffleScheduler.class);
+    MergeManagerImpl<Text, Text> mm = mock(MergeManagerImpl.class);
+    Reporter r = mock(Reporter.class);
+    ShuffleClientMetrics metrics = mock(ShuffleClientMetrics.class);
+    ExceptionReporter except = mock(ExceptionReporter.class);
+    SecretKey key = JobTokenSecretManager.createSecretKey(new byte[]{0,0,0,0});
+    HttpURLConnection connection = mock(HttpURLConnection.class);
+    when(connection.getInputStream()).thenThrow(
+        new SocketTimeoutException("This is a fake timeout :)"));
+
+    Counters.Counter allErrs = mock(Counters.Counter.class);
+    when(r.getCounter(anyString(), anyString()))
+        .thenReturn(allErrs);
+
+    Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
+        r, metrics, except, key, connection);
+
+    MapHost host = new MapHost("localhost", "http://localhost:8080/");
+
+    ArrayList<TaskAttemptID> maps = new ArrayList<TaskAttemptID>(1);
+    TaskAttemptID map1ID = TaskAttemptID.forName("attempt_0_1_m_1_1");
+    maps.add(map1ID);
+    TaskAttemptID map2ID = TaskAttemptID.forName("attempt_0_1_m_2_1");
+    maps.add(map2ID);
+    when(ss.getMapsForHost(host)).thenReturn(maps);
+
+    String encHash = "vFE234EIFCiBgYs2tCXY/SjT8Kg=";
+
+    underTest.copyFromHost(host);
+
+    verify(connection)
+        .addRequestProperty(SecureShuffleUtils.HTTP_HEADER_URL_HASH,
+            encHash);
+
+    verify(allErrs).increment(1);
+    verify(ss).copyFailed(map1ID, host, false, false);
+    verify(ss).copyFailed(map2ID, host, false, false);
+
+    verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
+    verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
+  }
+
+  @SuppressWarnings("unchecked")
   @Test
   public void testCopyFromHostBogusHeader() throws Exception {
     LOG.info("testCopyFromHostBogusHeader");
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java Mon Mar 18 11:45:07 2013
@@ -65,8 +65,12 @@ public class HsTasksBlock extends HtmlBl
     if (!symbol.isEmpty()) {
       type = MRApps.taskType(symbol);
     }
-
-    THEAD<TABLE<Hamlet>> thead = html.table("#tasks").thead();
+    THEAD<TABLE<Hamlet>> thead;
+    if(type != null)
+      thead = html.table("#"+app.getJob().getID()
+        + type).$class("dt-tasks").thead();
+    else
+      thead = html.table("#tasks").thead();
     //Create the spanning row
     int attemptColSpan = type == TaskType.REDUCE ? 8 : 3;
     thead.tr().

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksPage.java Mon Mar 18 11:45:07 2013
@@ -22,7 +22,9 @@ import static org.apache.hadoop.mapreduc
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_SELECTOR;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initSelector;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.postInitID;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
 
@@ -42,6 +44,8 @@ public class HsTasksPage extends HsView
   @Override protected void preHead(Page.HTML<_> html) {
     commonPreHead(html);
     set(DATATABLES_ID, "tasks");
+    set(DATATABLES_SELECTOR, ".dt-tasks" );
+    set(initSelector(DATATABLES), tasksTableInit());
     set(initID(ACCORDION, "nav"), "{autoHeight:false, active:1}");
     set(initID(DATATABLES, "tasks"), tasksTableInit());
     set(postInitID(DATATABLES, "tasks"), jobsPostTableInit());
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/MockHistoryJobs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/MockHistoryJobs.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/MockHistoryJobs.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/MockHistoryJobs.java Mon Mar 18 11:45:07 2013
@@ -77,13 +77,18 @@ public class MockHistoryJobs extends Moc
     for(Map.Entry<JobId, Job> entry: mocked.entrySet()) {
       JobId id = entry.getKey();
       Job j = entry.getValue();
-      ret.full.put(id, new MockCompletedJob(j));
-      JobReport report = j.getReport();
+      MockCompletedJob mockJob = new MockCompletedJob(j);
+      // use MockCompletedJob to set everything below to make sure
+      // consistent with what history server would do
+      ret.full.put(id, mockJob);
+      JobReport report = mockJob.getReport();
       JobIndexInfo info = new JobIndexInfo(report.getStartTime(),
-          report.getFinishTime(), j.getUserName(), j.getName(), id,
-          j.getCompletedMaps(), j.getCompletedReduces(), String.valueOf(j.getState()));
-      info.setQueueName(j.getQueueName());
+          report.getFinishTime(), mockJob.getUserName(), mockJob.getName(), id,
+          mockJob.getCompletedMaps(), mockJob.getCompletedReduces(),
+          String.valueOf(mockJob.getState()));
+      info.setQueueName(mockJob.getQueueName());
       ret.partial.put(id, new PartialJob(info, id));
+
     }
     return ret;
   }
@@ -99,12 +104,16 @@ public class MockHistoryJobs extends Moc
 
     @Override
     public int getCompletedMaps() {
-      return job.getCompletedMaps();
+      // we always return total since this is history server
+      // and PartialJob also assumes completed - total
+      return job.getTotalMaps();
    }
 
     @Override
     public int getCompletedReduces() {
-      return job.getCompletedReduces();
+      // we always return total since this is history server
+      // and PartialJob also assumes completed - total
+      return job.getTotalReduces();
     }
 
     @Override
number of elements", 1, json.length()); JSONObject info = json.getJSONObject("jobCounters"); - verifyHsJobCounters(info, jobsMap.get(id)); + verifyHsJobCounters(info, appContext.getJob(id)); } } @@ -689,7 +691,7 @@ public class TestHsWebServicesJobs exten JSONObject json = response.getEntity(JSONObject.class); assertEquals("incorrect number of elements", 1, json.length()); JSONObject info = json.getJSONObject("jobCounters"); - verifyHsJobCounters(info, jobsMap.get(id)); + verifyHsJobCounters(info, appContext.getJob(id)); } } @@ -711,7 +713,7 @@ public class TestHsWebServicesJobs exten is.setCharacterStream(new StringReader(xml)); Document dom = db.parse(is); NodeList info = dom.getElementsByTagName("jobCounters"); - verifyHsJobCountersXML(info, jobsMap.get(id)); + verifyHsJobCountersXML(info, appContext.getJob(id)); } } Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java?rev=1457716&r1=1457715&r2=1457716&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java (original) +++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java Mon Mar 18 11:45:07 2013 @@ -284,9 +284,9 @@ public class TestHsWebServicesJobsQuery String type = exception.getString("exception"); String classname = exception.getString("javaClassName"); WebServicesTestUtils - .checkStringMatch( + .checkStringContains( "exception message", - "No enum const class org.apache.hadoop.mapreduce.v2.api.records.JobState.InvalidState", + "org.apache.hadoop.mapreduce.v2.api.records.JobState.InvalidState", message); WebServicesTestUtils.checkStringMatch("exception type", "IllegalArgumentException", type); Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java?rev=1457716&r1=1457715&r2=1457716&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java (original) +++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java Mon Mar 18 11:45:07 2013 @@ -47,6 +47,7 @@ import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.IFile.Writer; import org.apache.hadoop.mapreduce.MRJobConfig; +import org.apache.hadoop.mapreduce.security.TokenCache; 
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java?rev=1457716&r1=1457715&r2=1457716&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java Mon Mar 18 11:45:07 2013
@@ -47,6 +47,7 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.IFile.Writer;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapred.Counters.Group;
@@ -106,7 +107,7 @@ public class TestPipeApplication {
     Token<ApplicationTokenIdentifier> token = new Token<ApplicationTokenIdentifier>(
         "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
             "service"));
-    conf.getCredentials().addToken(new Text("ShuffleAndJobToken"), token);
+    TokenCache.setJobToken(token, conf.getCredentials());
     conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
     TestTaskReporter reporter = new TestTaskReporter();
     PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text>();
@@ -171,7 +172,7 @@ public class TestPipeApplication {
         "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
             "service"));
 
-    conf.getCredentials().addToken(new Text("ShuffleAndJobToken"), token);
+    TokenCache.setJobToken(token, conf.getCredentials());
 
     FakeCollector output = new FakeCollector(new Counters.Counter(),
         new Progress());
     FileSystem fs = new RawLocalFileSystem();
@@ -391,7 +392,7 @@ public class TestPipeApplication {
     Token<ApplicationTokenIdentifier> token = new Token<ApplicationTokenIdentifier>(
         "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
             "service"));
-    conf.getCredentials().addToken(new Text("ShuffleAndJobToken"), token);
+    TokenCache.setJobToken(token, conf.getCredentials());
     File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeReducerStub");
     conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());