Author: wang
Date: Thu Oct 24 21:38:25 2013
New Revision: 1535563

URL: http://svn.apache.org/r1535563
Log:
merge trunk into HDFS-4949 branch
Modified:
    hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/   (props changed)
    hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/conf/   (props changed)
    hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
    hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml   (props changed)
    hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java

Propchange: hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project:r1532946-1535559

Modified: hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/CHANGES.txt?rev=1535563&r1=1535562&r2=1535563&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/CHANGES.txt Thu Oct 24 21:38:25 2013
@@ -209,6 +209,9 @@ Release 2.2.1 - UNRELEASED
     MAPREDUCE-5463. Deprecate SLOTS_MILLIS counters (Tzuyoshi Ozawa via
     Sandy Ryza)
 
+    MAPREDUCE-5457. Add a KeyOnlyTextOutputReader to enable streaming to write
+    out text files without separators (Sandy Ryza)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -222,6 +225,9 @@ Release 2.2.1 - UNRELEASED
     MAPREDUCE-5518. Fixed typo "can't read paritions file". (Albert Chu via
     devaraj)
 
+    MAPREDUCE-5561. org.apache.hadoop.mapreduce.v2.app.job.impl.TestJobImpl
+    testcase failing on trunk (Karthik Kambatla via jlowe)
+
 Release 2.2.0 - 2013-10-13
 
   INCOMPATIBLE CHANGES
@@ -1497,6 +1503,8 @@ Release 0.23.10 - UNRELEASED
     MAPREDUCE-5586. TestCopyMapper#testCopyFailOnBlockSizeDifference fails when
     run from hadoop-tools/hadoop-distcp directory (jeagles)
 
+    MAPREDUCE-5587. TestTextOutputFormat fails on JDK7 (jeagles)
+
 Release 0.23.9 - 2013-07-08
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:r1532946-1535559

Propchange: hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/conf/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/conf:r1532946-1535559

Modified: hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java?rev=1535563&r1=1535562&r2=1535563&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java Thu Oct 24 21:38:25 2013
@@ -415,7 +415,6 @@ public class TestJobImpl {
             TaskEventType.T_ATTEMPT_FAILED));
       }
     }
-    assertJobState(job, JobStateInternal.FAIL_ABORT);
     dispatcher.await();
 
     //Verify abortJob is called once and the job failed

Propchange: hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:r1532946-1535559

Modified: hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java?rev=1535563&r1=1535562&r2=1535563&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java Thu Oct 24 21:38:25 2013
@@ -18,13 +18,24 @@
 
 package org.apache.hadoop.mapred;
 
-import java.io.*;
-import junit.framework.TestCase;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.compress.DefaultCodec;
+import org.apache.hadoop.io.compress.CompressionInputStream;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.util.LineReader;
 
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
-
-public class TestTextOutputFormat extends TestCase {
+public class TestTextOutputFormat {
   private static JobConf defaultConf = new JobConf();
 
   private static FileSystem localFs = null;
@@ -38,12 +49,13 @@ public class TestTextOutputFormat extend
   // A random task attempt id for testing.
   private static String attempt = "attempt_200707121733_0001_m_000000_0";
 
-  private static Path workDir = 
+  private static Path workDir =
     new Path(new Path(
-                      new Path(System.getProperty("test.build.data", "."), 
-                               "data"), 
+                      new Path(System.getProperty("test.build.data", "."),
+                               "data"),
                       FileOutputCommitter.TEMP_DIR_NAME), "_" + attempt);
 
+  @Test
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
     job.set(JobContext.TASK_ATTEMPT_ID, attempt);
@@ -53,7 +65,7 @@ public class TestTextOutputFormat extend
     if (!fs.mkdirs(workDir)) {
       fail("Failed to create output directory");
     }
-    String file = "test.txt";
+    String file = "test_format.txt";
 
     // A reporter that does nothing
     Reporter reporter = Reporter.NULL;
@@ -90,10 +102,11 @@ public class TestTextOutputFormat extend
     expectedOutput.append(key1).append("\n");
     expectedOutput.append(key2).append('\t').append(val2).append("\n");
     String output = UtilsForTests.slurp(expectedFile);
-    assertEquals(output, expectedOutput.toString());
+    assertEquals(expectedOutput.toString(), output);
 
   }
 
+  @Test
   public void testFormatWithCustomSeparator() throws Exception {
     JobConf job = new JobConf();
     String separator = "\u0001";
@@ -105,7 +118,7 @@ public class TestTextOutputFormat extend
     if (!fs.mkdirs(workDir)) {
       fail("Failed to create output directory");
     }
-    String file = "test.txt";
+    String file = "test_custom.txt";
 
     // A reporter that does nothing
     Reporter reporter = Reporter.NULL;
@@ -142,27 +155,27 @@ public class TestTextOutputFormat extend
     expectedOutput.append(key1).append("\n");
     expectedOutput.append(key2).append(separator).append(val2).append("\n");
     String output = UtilsForTests.slurp(expectedFile);
-    assertEquals(output, expectedOutput.toString());
+    assertEquals(expectedOutput.toString(), output);
 
   }
+
   /**
    * test compressed file
    * @throws IOException
   */
-  public void testCompress() throws IOException{
+  @Test
+  public void testCompress() throws IOException {
     JobConf job = new JobConf();
-    String separator = "\u0001";
-    job.set("mapreduce.output.textoutputformat.separator", separator);
     job.set(JobContext.TASK_ATTEMPT_ID, attempt);
     job.set(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.COMPRESS,"true");
-    
+
     FileOutputFormat.setOutputPath(job, workDir.getParent().getParent());
     FileOutputFormat.setWorkOutputPath(job, workDir);
     FileSystem fs = workDir.getFileSystem(job);
     if (!fs.mkdirs(workDir)) {
       fail("Failed to create output directory");
     }
-    String file = "test.txt";
+    String file = "test_compress.txt";
 
     // A reporter that does nothing
     Reporter reporter = Reporter.NULL;
@@ -189,16 +202,30 @@ public class TestTextOutputFormat extend
     } finally {
       theRecordWriter.close(reporter);
     }
-    File expectedFile = new File(new Path(workDir, file).toString());
     StringBuffer expectedOutput = new StringBuffer();
-    expectedOutput.append(key1).append(separator).append(val1).append("\n");
+    expectedOutput.append(key1).append("\t").append(val1).append("\n");
     expectedOutput.append(val1).append("\n");
     expectedOutput.append(val2).append("\n");
     expectedOutput.append(key2).append("\n");
     expectedOutput.append(key1).append("\n");
-    expectedOutput.append(key2).append(separator).append(val2).append("\n");
-    String output = UtilsForTests.slurp(expectedFile);
-    assertEquals(output, expectedOutput.toString());
+    expectedOutput.append(key2).append("\t").append(val2).append("\n");
+
+    DefaultCodec codec = new DefaultCodec();
+    codec.setConf(job);
+    Path expectedFile = new Path(workDir, file + codec.getDefaultExtension());
+    final FileInputStream istream = new FileInputStream(expectedFile.toString());
+    CompressionInputStream cistream = codec.createInputStream(istream);
+    LineReader reader = new LineReader(cistream);
+
+    String output = "";
+    Text out = new Text();
+    while (reader.readLine(out) > 0) {
+      output += out;
+      output += "\n";
+    }
+    reader.close();
+
+    assertEquals(expectedOutput.toString(), output);
   }
   public static void main(String[] args) throws Exception {
     new TestTextOutputFormat().testFormat();
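
Note on the TestTextOutputFormat change above: the reworked testCompress no longer slurps the output file as raw bytes; it re-reads the DefaultCodec-compressed part file through the codec and compares the decoded lines against the expected text. The following is a minimal standalone sketch of that read-back pattern, not the committed test itself: the class name CompressedTextReadBack and the sample path part-00000.deflate are hypothetical, and it assumes a text file written by TextOutputFormat with FileOutputFormat.COMPRESS set to true and the default codec.

import java.io.FileInputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.util.LineReader;

/**
 * Sketch: decode a DefaultCodec-compressed text output file line by line,
 * mirroring the read-back logic added to TestTextOutputFormat#testCompress.
 */
public class CompressedTextReadBack {

  public static String readAll(String path, Configuration conf) throws IOException {
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(conf);                       // the codec needs a Configuration before use

    StringBuilder sb = new StringBuilder();
    try (FileInputStream istream = new FileInputStream(path);
         CompressionInputStream cistream = codec.createInputStream(istream)) {
      LineReader reader = new LineReader(cistream);
      Text line = new Text();
      // readLine() returns the number of bytes consumed; 0 means end of stream
      while (reader.readLine(line) > 0) {
        sb.append(line).append("\n");
      }
      reader.close();
    }
    return sb.toString();
  }

  public static void main(String[] args) throws IOException {
    // "part-00000.deflate" is a hypothetical file name used only for illustration.
    System.out.println(readAll("part-00000.deflate", new Configuration()));
  }
}

Decoding through the codec rather than comparing the raw bytes on disk keeps the assertion independent of the compression framing, which is what lets the updated test check the plain-text contents of a compressed output file.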