Author: arp
Date: Tue Oct 29 21:05:15 2013
New Revision: 1536890

URL: http://svn.apache.org/r1536890
Log:
Merging r1536573 through r1536889 from trunk to branch HDFS-2832
Modified:
    hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/   (props changed)
    hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml   (contents, props changed)
    hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java
    hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineTextInputFormat.java
    hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java

Propchange: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project:r1535792-1536181,1536559-1536571,1536573-1536889

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt?rev=1536890&r1=1536889&r2=1536890&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt Tue Oct 29 21:05:15 2013
@@ -212,6 +212,9 @@ Release 2.2.1 - UNRELEASED
     MAPREDUCE-5457. Add a KeyOnlyTextOutputReader to enable streaming to write
     out text files without separators (Sandy Ryza)
 
+    MAPREDUCE-5596. Allow configuring the number of threads used to serve
+    shuffle connections (Sandy Ryza via jlowe)
+
   OPTIMIZATIONS
 
   MAPREDUCE-4680. Job history cleaner should only check timestamps of files in
@@ -231,6 +234,9 @@ Release 2.2.1 - UNRELEASED
     MAPREDUCE-5561. org.apache.hadoop.mapreduce.v2.app.job.impl.TestJobImpl
     testcase failing on trunk (Karthik Kambatla via jlowe)
 
+    MAPREDUCE-5598. TestUserDefinedCounters.testMapReduceJob is flakey
+    (Robert Kanter via jlowe)
+
 Release 2.2.0 - 2013-10-13
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:r1535792-1536181,1536559-1536571,1536573-1536889

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml?rev=1536890&r1=1536889&r2=1536890&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml Tue Oct 29 21:05:15 2013
@@ -305,6 +305,16 @@
 </property>
 
 <property>
+  <name>mapreduce.shuffle.max.threads</name>
+  <value>0</value>
+  <description>Max allowed threads for serving shuffle connections. Set to zero
+  to indicate the default of 2 times the number of available
+  processors (as reported by Runtime.availableProcessors()). Netty is used to
+  serve requests, so a thread is not needed for each connection.
+  </description>
+</property>
+
+<property>
   <name>mapreduce.reduce.markreset.buffer.percent</name>
   <value>0.0</value>
   <description>The percentage of memory -relative to the maximum heap size- to

Propchange: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:r1535792-1536571,1536573-1536889
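For context, the new property ships with a value of 0, which ShuffleHandler resolves at runtime to 2 * Runtime.availableProcessors(). An operator who wants a hard cap would override it in mapred-site.xml; a hypothetical override (the value 40 is only an example, not a recommendation):

<property>
  <name>mapreduce.shuffle.max.threads</name>
  <value>40</value>
</property>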
Path(TEST_ROOT_DIR + "/out"); @@ -61,7 +62,7 @@ public class TestUserDefinedCounters ext } private void cleanAndCreateInput(FileSystem fs) throws IOException { - fs.delete(INPUT_FILE, true); + fs.delete(INPUT_DIR, true); fs.delete(OUTPUT_DIR, true); OutputStream os = fs.create(INPUT_FILE); Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineTextInputFormat.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineTextInputFormat.java?rev=1536890&r1=1536889&r2=1536890&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineTextInputFormat.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineTextInputFormat.java Tue Oct 29 21:05:15 2013 @@ -72,7 +72,7 @@ public class TestCombineTextInputFormat new Path(new Path(System.getProperty("test.build.data", "."), "data"), "TestCombineTextInputFormat"); - @Test(timeout=10000) + @Test//(timeout=10000) public void testFormat() throws Exception { Job job = Job.getInstance(new Configuration(defaultConf)); Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java?rev=1536890&r1=1536889&r2=1536890&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java Tue Oct 29 21:05:15 2013 @@ -163,6 +163,10 @@ public class ShuffleHandler extends Auxi public static final String MAX_SHUFFLE_CONNECTIONS = "mapreduce.shuffle.max.connections"; public static final int DEFAULT_MAX_SHUFFLE_CONNECTIONS = 0; // 0 implies no limit + + public static final String MAX_SHUFFLE_THREADS = "mapreduce.shuffle.max.threads"; + // 0 implies Netty default of 2 * number of available processors + public static final int DEFAULT_MAX_SHUFFLE_THREADS = 0; @Metrics(about="Shuffle output metrics", context="mapred") static class ShuffleMetrics implements ChannelFutureListener { @@ -282,6 +286,11 @@ public class ShuffleHandler extends Auxi maxShuffleConnections = conf.getInt(MAX_SHUFFLE_CONNECTIONS, DEFAULT_MAX_SHUFFLE_CONNECTIONS); + int maxShuffleThreads = conf.getInt(MAX_SHUFFLE_THREADS, + DEFAULT_MAX_SHUFFLE_THREADS); + if (maxShuffleThreads == 0) { + maxShuffleThreads = 2 * Runtime.getRuntime().availableProcessors(); + } ThreadFactory bossFactory = new ThreadFactoryBuilder() .setNameFormat("ShuffleHandler Netty Boss #%d") @@ -292,7 
Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineTextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineTextInputFormat.java?rev=1536890&r1=1536889&r2=1536890&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineTextInputFormat.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineTextInputFormat.java Tue Oct 29 21:05:15 2013
@@ -72,7 +72,7 @@ public class TestCombineTextInputFormat
     new Path(new Path(System.getProperty("test.build.data", "."), "data"),
              "TestCombineTextInputFormat");
 
-  @Test(timeout=10000)
+  @Test//(timeout=10000)
   public void testFormat() throws Exception {
     Job job = Job.getInstance(new Configuration(defaultConf));

Modified: hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java?rev=1536890&r1=1536889&r2=1536890&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java Tue Oct 29 21:05:15 2013
@@ -163,6 +163,10 @@ public class ShuffleHandler extends Auxi
   public static final String MAX_SHUFFLE_CONNECTIONS = "mapreduce.shuffle.max.connections";
   public static final int DEFAULT_MAX_SHUFFLE_CONNECTIONS = 0; // 0 implies no limit
+
+  public static final String MAX_SHUFFLE_THREADS = "mapreduce.shuffle.max.threads";
+  // 0 implies Netty default of 2 * number of available processors
+  public static final int DEFAULT_MAX_SHUFFLE_THREADS = 0;
 
   @Metrics(about="Shuffle output metrics", context="mapred")
   static class ShuffleMetrics implements ChannelFutureListener {
@@ -282,6 +286,11 @@ public class ShuffleHandler extends Auxi
 
     maxShuffleConnections = conf.getInt(MAX_SHUFFLE_CONNECTIONS,
                                         DEFAULT_MAX_SHUFFLE_CONNECTIONS);
+    int maxShuffleThreads = conf.getInt(MAX_SHUFFLE_THREADS,
+        DEFAULT_MAX_SHUFFLE_THREADS);
+    if (maxShuffleThreads == 0) {
+      maxShuffleThreads = 2 * Runtime.getRuntime().availableProcessors();
+    }
 
     ThreadFactory bossFactory = new ThreadFactoryBuilder()
       .setNameFormat("ShuffleHandler Netty Boss #%d")
@@ -292,7 +301,8 @@ public class ShuffleHandler extends Auxi
 
     selector = new NioServerSocketChannelFactory(
         Executors.newCachedThreadPool(bossFactory),
-        Executors.newCachedThreadPool(workerFactory));
+        Executors.newCachedThreadPool(workerFactory),
+        maxShuffleThreads);
     super.serviceInit(new Configuration(conf));
   }
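Taken together, the ShuffleHandler hunks read mapreduce.shuffle.max.threads, substitute 2 * available processors when the value is 0, and pass the result as the worker-count argument of Netty 3's NioServerSocketChannelFactory(bossExecutor, workerExecutor, workerCount) constructor, so the otherwise unbounded cached thread pool is capped. A condensed, self-contained sketch of that wiring, assuming the Netty 3.x and Guava dependencies ShuffleHandler already uses (the class and method names here are illustrative, not part of the patch):

import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;

import org.apache.hadoop.conf.Configuration;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;

import com.google.common.util.concurrent.ThreadFactoryBuilder;

public class ShuffleThreadWiringSketch {
  public static NioServerSocketChannelFactory createFactory(Configuration conf) {
    // 0 (the shipped default) means "use 2 * available processors".
    int maxShuffleThreads = conf.getInt("mapreduce.shuffle.max.threads", 0);
    if (maxShuffleThreads == 0) {
      maxShuffleThreads = 2 * Runtime.getRuntime().availableProcessors();
    }

    ThreadFactory bossFactory = new ThreadFactoryBuilder()
        .setNameFormat("ShuffleHandler Netty Boss #%d").build();
    ThreadFactory workerFactory = new ThreadFactoryBuilder()
        .setNameFormat("ShuffleHandler Netty Worker #%d").build();

    // The third argument bounds how many worker threads Netty will actually
    // draw from the (otherwise unbounded) cached thread pool.
    return new NioServerSocketChannelFactory(
        Executors.newCachedThreadPool(bossFactory),
        Executors.newCachedThreadPool(workerFactory),
        maxShuffleThreads);
  }
}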