Modified: hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java?rev=1532967&r1=1532966&r2=1532967&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java Thu Oct 17 05:32:42 2013
@@ -29,6 +29,8 @@ import java.io.PipedInputStream;
 import java.io.PipedOutputStream;
 import java.io.PrintStream;
+import junit.framework.Assert;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -60,6 +62,22 @@ public class TestMRJobClient extends Clu
     return job;
   }
 
+  private Job runJobInBackGround(Configuration conf) throws Exception {
+    String input = "hello1\nhello2\nhello3\n";
+
+    Job job = MapReduceTestUtil.createJob(conf, getInputDir(), getOutputDir(),
+        1, 1, input);
+    job.setJobName("mr");
+    job.setPriority(JobPriority.NORMAL);
+    job.submit();
+    int i = 0;
+    while (i++ < 200 && job.getJobID() == null) {
+      LOG.info("waiting for jobId...");
+      Thread.sleep(100);
+    }
+    return job;
+  }
+
   public static int runTool(Configuration conf, Tool tool, String[] args,
       OutputStream out) throws Exception {
     PrintStream oldOut = System.out;
@@ -108,8 +126,10 @@ public class TestMRJobClient extends Clu
     Job job = runJob(conf);
     String jobId = job.getJobID().toString();
-    // test jobs list
-    testJobList(jobId, conf);
+    // test all jobs list
+    testAllJobList(jobId, conf);
+    // test only submitted jobs list
+    testSubmittedJobList(conf);
     // test job counter
     testGetCounter(jobId, conf);
     // status
@@ -131,38 +151,37 @@ public class TestMRJobClient extends Clu
     // submit job from file
     testSubmit(conf);
     // kill a task
-    testKillTask(job, conf);
+    testKillTask(conf);
     // fail a task
-    testfailTask(job, conf);
+    testfailTask(conf);
     // kill job
-    testKillJob(jobId, conf);
-
+    testKillJob(conf);
   }
 
   /**
    * test fail task
    */
-  private void testfailTask(Job job, Configuration conf) throws Exception {
+  private void testfailTask(Configuration conf) throws Exception {
+    Job job = runJobInBackGround(conf);
     CLI jc = createJobClient();
     TaskID tid = new TaskID(job.getJobID(), TaskType.MAP, 0);
     TaskAttemptID taid = new TaskAttemptID(tid, 1);
     ByteArrayOutputStream out = new ByteArrayOutputStream();
-    // TaskAttemptId is not set
+    // TaskAttemptId is not set
     int exitCode = runTool(conf, jc, new String[] { "-fail-task" }, out);
     assertEquals("Exit code", -1, exitCode);
-    try {
-      runTool(conf, jc, new String[] { "-fail-task", taid.toString() }, out);
-      fail(" this task should field");
-    } catch (IOException e) {
-      // task completed !
-      assertTrue(e.getMessage().contains("_0001_m_000000_1"));
-    }
+    runTool(conf, jc, new String[] { "-fail-task", taid.toString() }, out);
+    String answer = new String(out.toByteArray(), "UTF-8");
+    Assert
+        .assertTrue(answer.contains("Killed task " + taid + " by failing it"));
   }
+
   /**
    * test a kill task
    */
-  private void testKillTask(Job job, Configuration conf) throws Exception {
+  private void testKillTask(Configuration conf) throws Exception {
+    Job job = runJobInBackGround(conf);
     CLI jc = createJobClient();
     TaskID tid = new TaskID(job.getJobID(), TaskType.MAP, 0);
     TaskAttemptID taid = new TaskAttemptID(tid, 1);
@@ -171,20 +190,17 @@ public class TestMRJobClient extends Clu
     int exitCode = runTool(conf, jc, new String[] { "-kill-task" }, out);
     assertEquals("Exit code", -1, exitCode);
-    try {
-      runTool(conf, jc, new String[] { "-kill-task", taid.toString() }, out);
-      fail(" this task should be killed");
-    } catch (IOException e) {
-      System.out.println(e);
-      // task completed
-      assertTrue(e.getMessage().contains("_0001_m_000000_1"));
-    }
+    runTool(conf, jc, new String[] { "-kill-task", taid.toString() }, out);
+    String answer = new String(out.toByteArray(), "UTF-8");
+    Assert.assertTrue(answer.contains("Killed task " + taid));
   }
 
   /**
    * test a kill job
    */
-  private void testKillJob(String jobId, Configuration conf) throws Exception {
+  private void testKillJob(Configuration conf) throws Exception {
+    Job job = runJobInBackGround(conf);
+    String jobId = job.getJobID().toString();
     CLI jc = createJobClient();
 
     ByteArrayOutputStream out = new ByteArrayOutputStream();
@@ -435,7 +451,8 @@ public class TestMRJobClient extends Clu
   /**
    * print a job list
    */
-  protected void testJobList(String jobId, Configuration conf) throws Exception {
+  protected void testAllJobList(String jobId, Configuration conf)
+      throws Exception {
     ByteArrayOutputStream out = new ByteArrayOutputStream();
 
     // bad options
@@ -458,23 +475,31 @@ public class TestMRJobClient extends Clu
     }
     assertEquals(1, counter);
     out.reset();
-    // only submitted
-    exitCode = runTool(conf, createJobClient(), new String[] { "-list" }, out);
-    assertEquals("Exit code", 0, exitCode);
-    br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(
-        out.toByteArray())));
+  }
+
+  protected void testSubmittedJobList(Configuration conf) throws Exception {
+    Job job = runJobInBackGround(conf);
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    String line;
+    int counter = 0;
+    // only submitted
+    int exitCode =
+        runTool(conf, createJobClient(), new String[] { "-list" }, out);
+    assertEquals("Exit code", 0, exitCode);
+    BufferedReader br =
+        new BufferedReader(new InputStreamReader(new ByteArrayInputStream(
+            out.toByteArray())));
     counter = 0;
     while ((line = br.readLine()) != null) {
      LOG.info("line = " + line);
-      if (line.contains(jobId)) {
+      if (line.contains(job.getJobID().toString())) {
        counter++;
      }
    }
     // all jobs submitted! no current
     assertEquals(1, counter);
   }
-
+
   protected void verifyJobPriority(String jobId, String priority,
       Configuration conf, CLI jc) throws Exception {
     PipedInputStream pis = new PipedInputStream();
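The rewritten -fail-task and -kill-task assertions above depend on runTool temporarily swapping System.out for a stream the test owns, so the CLI's console output can be checked as a string instead of relying on a thrown IOException. A minimal, self-contained sketch of that capture pattern (plain JDK only; CliLike is a hypothetical stand-in for org.apache.hadoop.util.Tool, not part of this commit):

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;

    public class StdoutCaptureSketch {

      /** Hypothetical stand-in for a Tool whose run() prints to System.out. */
      interface CliLike {
        int run(String[] args) throws Exception;
      }

      /** Redirect System.out into 'out' while the tool runs, then restore it. */
      static int runAndCapture(CliLike cli, String[] args,
          ByteArrayOutputStream out) throws Exception {
        PrintStream oldOut = System.out;
        try {
          System.setOut(new PrintStream(out, true, "UTF-8"));
          return cli.run(args);
        } finally {
          System.setOut(oldOut); // always restore the real stdout
        }
      }

      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        int exitCode = runAndCapture(new CliLike() {
          @Override
          public int run(String[] a) {
            System.out.println("Killed task attempt_x_m_000000_1");
            return 0;
          }
        }, new String[0], out);
        String answer = new String(out.toByteArray(), "UTF-8");
        System.err.println("exit=" + exitCode + " captured: " + answer.trim());
      }
    }

The same non-blocking idea appears in runJobInBackGround above: submit() can return before a job id exists, so the test polls getJobID() (bounded at 200 iterations of 100 ms) rather than blocking on waitForCompletion(), leaving a live job for the kill/fail commands to act on.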
Modified: hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestJHSSecurity.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestJHSSecurity.java?rev=1532967&r1=1532966&r2=1532967&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestJHSSecurity.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestJHSSecurity.java Thu Oct 17 05:32:42 2013
@@ -39,6 +39,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.RenewDelegationTokenRequest;
+import org.apache.hadoop.mapreduce.v2.hs.HistoryServerStateStoreService;
 import org.apache.hadoop.mapreduce.v2.hs.JHSDelegationTokenSecretManager;
 import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
@@ -87,10 +88,11 @@ public class TestJHSSecurity {
           // no keytab based login
         };
 
+        @Override
         protected JHSDelegationTokenSecretManager createJHSSecretManager(
-            Configuration conf) {
+            Configuration conf, HistoryServerStateStoreService store) {
           return new JHSDelegationTokenSecretManager(initialInterval,
-              maxLifetime, renewInterval, 3600000);
+              maxLifetime, renewInterval, 3600000, store);
         }
       };
//      final JobHistoryServer jobHistoryServer = jhServer;
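This hunk tracks an upstream signature change: JobHistoryServer's secret-manager factory now also receives a HistoryServerStateStoreService, so the test's anonymous subclass must override the two-argument method (and gains an @Override annotation in the process). The test technique itself is overriding a protected factory to inject short token lifetimes; a generic sketch of the pattern (all class names below are illustrative placeholders, not the Hadoop API):

    public class FactoryOverrideSketch {

      static class StateStore { }

      static class TokenManager {
        final long renewIntervalMs;
        TokenManager(long renewIntervalMs, StateStore store) {
          this.renewIntervalMs = renewIntervalMs;
        }
      }

      static class HistoryServer {
        /** Production factory: long-lived tokens. */
        protected TokenManager createTokenManager(StateStore store) {
          return new TokenManager(24L * 60 * 60 * 1000, store);
        }
      }

      public static void main(String[] args) {
        // A test subclasses the server and shortens the interval so token
        // renewal and expiry can be observed within a unit-test run.
        HistoryServer testServer = new HistoryServer() {
          @Override
          protected TokenManager createTokenManager(StateStore store) {
            return new TokenManager(1000L, store);
          }
        };
        System.out.println("test renew interval = "
            + testServer.createTokenManager(new StateStore()).renewIntervalMs);
      }
    }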
Modified: hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java?rev=1532967&r1=1532966&r2=1532967&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java Thu Oct 17 05:32:42 2013
@@ -20,6 +20,9 @@ package org.apache.hadoop.mapreduce.v2;
 import java.io.File;
 import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -28,6 +31,7 @@ import org.apache.hadoop.fs.CommonConfig
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapred.LocalContainerLauncher;
 import org.apache.hadoop.mapred.ShuffleHandler;
 import org.apache.hadoop.mapreduce.MRConfig;
@@ -35,6 +39,8 @@ import org.apache.hadoop.mapreduce.MRJob
 import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
+import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.service.Service;
 import org.apache.hadoop.util.JarFinder;
@@ -43,6 +49,7 @@ import org.apache.hadoop.yarn.exceptions
 import org.apache.hadoop.yarn.server.MiniYARNCluster;
 import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor;
 import org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor;
+import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
 
 /**
  * Configures and starts the MR-specific components in the YARN cluster.
@@ -67,6 +74,38 @@ public class MiniMRYarnCluster extends M
     addService(historyServerWrapper);
   }
 
+  public static String getResolvedMRHistoryWebAppURLWithoutScheme(
+      Configuration conf, boolean isSSLEnabled) {
+    InetSocketAddress address = null;
+    if (isSSLEnabled) {
+      address =
+          conf.getSocketAddr(JHAdminConfig.MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
+              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_ADDRESS,
+              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_HTTPS_PORT);
+    } else {
+      address =
+          conf.getSocketAddr(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
+              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS,
+              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT);
+    }
+    address = NetUtils.getConnectAddress(address);
+    StringBuffer sb = new StringBuffer();
+    InetAddress resolved = address.getAddress();
+    if (resolved == null || resolved.isAnyLocalAddress() ||
+        resolved.isLoopbackAddress()) {
+      String lh = address.getHostName();
+      try {
+        lh = InetAddress.getLocalHost().getCanonicalHostName();
+      } catch (UnknownHostException e) {
+        //Ignore and fallback.
+      }
+      sb.append(lh);
+    } else {
+      sb.append(address.getHostName());
+    }
+    sb.append(":").append(address.getPort());
+    return sb.toString();
+  }
+
   @Override
   public void serviceInit(Configuration conf) throws Exception {
     conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
@@ -155,8 +194,8 @@ public class MiniMRYarnCluster extends M
       // pick free random ports.
       getConfig().set(JHAdminConfig.MR_HISTORY_ADDRESS, hostname + ":0");
-      getConfig().set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
-          hostname + ":0");
+      MRWebAppUtil.setJHSWebappURLWithoutScheme(getConfig(), hostname
+          + ":0");
       getConfig().set(JHAdminConfig.JHS_ADMIN_ADDRESS, hostname + ":0");
     }
@@ -182,17 +221,18 @@ public class MiniMRYarnCluster extends M
       //need to do this because historyServer.init creates a new Configuration
       getConfig().set(JHAdminConfig.MR_HISTORY_ADDRESS,
          historyServer.getConfig().get(JHAdminConfig.MR_HISTORY_ADDRESS));
-      getConfig().set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
-          historyServer.getConfig().get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS));
+      MRWebAppUtil.setJHSWebappURLWithoutScheme(getConfig(),
+          MRWebAppUtil.getJHSWebappURLWithoutScheme(historyServer.getConfig()));
 
       LOG.info("MiniMRYARN ResourceManager address: " +
                getConfig().get(YarnConfiguration.RM_ADDRESS));
       LOG.info("MiniMRYARN ResourceManager web address: " +
-               getConfig().get(YarnConfiguration.RM_WEBAPP_ADDRESS));
+               WebAppUtils.getRMWebAppURLWithoutScheme(getConfig()));
       LOG.info("MiniMRYARN HistoryServer address: " +
                getConfig().get(JHAdminConfig.MR_HISTORY_ADDRESS));
       LOG.info("MiniMRYARN HistoryServer web address: " +
-               getConfig().get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS));
+               getResolvedMRHistoryWebAppURLWithoutScheme(getConfig(),
+                   HttpConfig.isSecure()));
     }
 
     @Override
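The new getResolvedMRHistoryWebAppURLWithoutScheme exists because the mini cluster binds the history server's web app to a wildcard or loopback address with port 0, and logging that address verbatim is useless to a remote caller; the method substitutes the local canonical hostname. The resolution step, reduced to plain JDK calls (a sketch of the same idea, not the Hadoop utility itself):

    import java.net.InetAddress;
    import java.net.InetSocketAddress;
    import java.net.UnknownHostException;

    public class ResolveAddressSketch {

      // Turn a bind address such as 0.0.0.0:19888 into a host:port string
      // that other machines have a chance of reaching.
      static String toExternalHostPort(InetSocketAddress address) {
        InetAddress resolved = address.getAddress();
        String host = address.getHostName();
        if (resolved == null || resolved.isAnyLocalAddress()
            || resolved.isLoopbackAddress()) {
          try {
            host = InetAddress.getLocalHost().getCanonicalHostName();
          } catch (UnknownHostException e) {
            // keep the host from the socket address as a fallback
          }
        }
        return host + ":" + address.getPort();
      }

      public static void main(String[] args) {
        // Prints e.g. "myhost.example.com:19888" instead of "0.0.0.0:19888".
        System.out.println(
            toExternalHostPort(new InetSocketAddress("0.0.0.0", 19888)));
      }
    }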
Modified: hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java?rev=1532967&r1=1532966&r2=1532967&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java Thu Oct 17 05:32:42 2013
@@ -111,7 +111,7 @@ public class TestMRJobsWithHistoryServic
     }
   }
 
-  @Test (timeout = 30000)
+  @Test (timeout = 90000)
   public void testJobHistoryData() throws IOException, InterruptedException,
       AvroRemoteException, ClassNotFoundException {
     if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
Modified: hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestRMNMInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestRMNMInfo.java?rev=1532967&r1=1532966&r2=1532967&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestRMNMInfo.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestRMNMInfo.java Thu Oct 17 05:32:42 2013
@@ -120,6 +120,7 @@ public class TestRMNMInfo {
       Assert.assertNotNull(n.get("NodeHTTPAddress"));
       Assert.assertNotNull(n.get("LastHealthUpdate"));
       Assert.assertNotNull(n.get("HealthReport"));
+      Assert.assertNotNull(n.get("NodeManagerVersion"));
       Assert.assertNotNull(n.get("NumContainers"));
       Assert.assertEquals(
               n.get("NodeId") + ": Unexpected number of used containers",
@@ -156,6 +157,7 @@ public class TestRMNMInfo {
       Assert.assertNotNull(n.get("NodeHTTPAddress"));
       Assert.assertNotNull(n.get("LastHealthUpdate"));
       Assert.assertNotNull(n.get("HealthReport"));
+      Assert.assertNotNull(n.get("NodeManagerVersion"));
       Assert.assertNull(n.get("NumContainers"));
       Assert.assertNull(n.get("UsedMemoryMB"));
       Assert.assertNull(n.get("AvailableMemoryMB"));

Modified: hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java?rev=1532967&r1=1532966&r2=1532967&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java Thu Oct 17 05:32:42 2013
@@ -146,7 +146,7 @@ public class ShuffleHandler extends Auxi
   private ReadaheadPool readaheadPool = ReadaheadPool.getInstance();
 
   public static final String MAPREDUCE_SHUFFLE_SERVICEID =
-      "mapreduce.shuffle";
+      "mapreduce_shuffle";
 
   private static final Map<String,String> userRsrc =
     new ConcurrentHashMap<String,String>();

Modified: hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java?rev=1532967&r1=1532966&r2=1532967&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java Thu Oct 17 05:32:42 2013
@@ -213,7 +213,7 @@ public class TeraSort extends Configured
       splitPoints = readPartitions(fs, partFile, conf);
       trie = buildTrie(splitPoints, 0, splitPoints.length, new Text(), 2);
     } catch (IOException ie) {
-      throw new IllegalArgumentException("can't read paritions file", ie);
+      throw new IllegalArgumentException("can't read partitions file", ie);
     }
   }
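The ShuffleHandler hunk renames the auxiliary service id from "mapreduce.shuffle" to "mapreduce_shuffle", which matches YARN's restriction that aux-service names contain only letters, digits, and underscores; NodeManager configurations using the dotted id stop working once this change is picked up. A sketch of the conventional wiring for the renamed service (the yarn.nodemanager.aux-services keys are the standard ones, but verify them against your Hadoop version):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapred.ShuffleHandler;
    import org.apache.hadoop.service.Service;

    public class ShuffleConfigSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // The service id must now be "mapreduce_shuffle", not "mapreduce.shuffle".
        conf.set("yarn.nodemanager.aux-services",
            ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID);
        // Map the service id to its implementing class.
        conf.setClass("yarn.nodemanager.aux-services."
            + ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID + ".class",
            ShuffleHandler.class, Service.class);
        System.out.println(conf.get("yarn.nodemanager.aux-services"));
      }
    }

The TeraSort hunk is a message-only fix ("paritions" corrected to "partitions") with no behavioral impact.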