Modified: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraOutputFormat.java?rev=1371518&r1=1371517&r2=1371518&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraOutputFormat.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraOutputFormat.java Thu Aug 9 22:29:36 2012
@@ -31,6 +31,7 @@ import org.apache.hadoop.mapreduce.Recor
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.security.TokenCache;
 
 /**
  * An output format that writes the key and value appended together.
@@ -85,6 +86,10 @@ public class TeraOutputFormat extends Fi
     if (outDir == null) {
       throw new InvalidJobConfException("Output directory not set in JobConf.");
     }
+
+    // get delegation token for outDir's file system
+    TokenCache.obtainTokensForNamenodes(job.getCredentials(),
+        new Path[] { outDir }, job.getConfiguration());
   }
 
   public RecordWriter<Text,Text> getRecordWriter(TaskAttemptContext job
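
Context for the TeraOutputFormat change: checkOutputSpecs previously only verified that an output directory was configured; on a Kerberos-secured cluster the submitting client must also obtain HDFS delegation tokens for that directory so tasks can write to it. The sketch below shows the same TokenCache pattern in a generic custom OutputFormat; the class name SecureOutputFormat and its details are illustrative, not taken from the commit.

    import java.io.IOException;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.InvalidJobConfException;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.hadoop.mapreduce.security.TokenCache;

    // Illustrative sketch of the delegation-token pattern used above.
    public abstract class SecureOutputFormat extends FileOutputFormat<Text, Text> {
      @Override
      public void checkOutputSpecs(JobContext job) throws IOException {
        Path outDir = getOutputPath(job);
        if (outDir == null) {
          throw new InvalidJobConfException("Output directory not set.");
        }
        // Fetch HDFS delegation tokens for the output directory's namenode at
        // submission time so tasks can write there on a secure cluster.
        TokenCache.obtainTokensForNamenodes(job.getCredentials(),
            new Path[] { outDir }, job.getConfiguration());
      }
    }
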
Modified: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java?rev=1371518&r1=1371517&r2=1371518&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraSort.java Thu Aug 9 22:29:36 2012
@@ -305,8 +305,7 @@ public class TeraSort extends Configured
       LOG.error(e.getMessage());
       return -1;
     }
-    job.addCacheFile(partitionUri);
-    job.createSymlink();
+    job.addCacheFile(partitionUri);
     long end = System.currentTimeMillis();
     System.out.println("Spent " + (end - start) + "ms computing partitions.");
     job.setPartitionerClass(TotalOrderPartitioner.class);

Modified: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/pom.xml?rev=1371518&r1=1371517&r2=1371518&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/pom.xml Thu Aug 9 22:29:36 2012
@@ -39,9 +39,8 @@
   </properties>
 
   <modules>
-    <module>hadoop-yarn</module>
-    <module>hadoop-mapreduce-client</module>
-    <module>hadoop-mapreduce-examples</module>
+    <module>hadoop-mapreduce-client</module>
+    <module>hadoop-mapreduce-examples</module>
   </modules>
 
   <dependencies>
@@ -162,7 +161,7 @@
     <dependency>
       <groupId>org.hsqldb</groupId>
       <artifactId>hsqldb</artifactId>
-      <version>2.0.0</version>
+      <scope>compile</scope>
     </dependency>
   </dependencies>

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/c++/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/c++:r1367365-1371513

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib:r1367365-1371513

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/block_forensics/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/block_forensics:r1367365-1371513

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/build-contrib.xml
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build-contrib.xml:r1367365-1371513

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/build.xml
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build.xml:r1367365-1371513
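
On the TeraSort change above: under MRv2 the distributed cache symlinks cached files into the task working directory automatically, so the explicit Job.createSymlink() call (deprecated and effectively a no-op) can be dropped; only Job.addCacheFile() is needed to ship the partition file to TotalOrderPartitioner. A rough sketch of that wiring follows; the class and method names are illustrative examples, not taken from the commit.

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;

    // Illustrative helper showing the partition-file wiring; names are examples.
    public class TotalOrderSetup {
      public static void configurePartitioner(Job job, Path partitionFile) {
        // Tell TotalOrderPartitioner where its split points are stored.
        TotalOrderPartitioner.setPartitionFile(job.getConfiguration(), partitionFile);
        // Ship the file via the distributed cache; MRv2 symlinks it into the
        // task working directory automatically, so no createSymlink() call.
        job.addCacheFile(partitionFile.toUri());
        job.setPartitionerClass(TotalOrderPartitioner.class);
      }
    }
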
Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/data_join/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/data_join:r1367365-1371513

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/eclipse-plugin/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/eclipse-plugin:r1367365-1371513

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/index/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/index:r1367365-1371513

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/contrib/vaidya/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/vaidya:r1367365-1371513

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/examples/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/examples:r1367365-1371513

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/java:r1367365-1371513

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred:r1367365-1371513

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs:r1367365-1371513

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs:r1367365-1371513

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc:r1367365-1371513

Modified: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/SortValidator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/SortValidator.java?rev=1371518&r1=1371517&r2=1371518&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/SortValidator.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/SortValidator.java Thu Aug 9 22:29:36 2012
@@ -33,7 +33,6 @@ import org.apache.hadoop.io.WritableComp
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.mapred.lib.HashPartitioner;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.fs.*;
@@ -345,7 +344,8 @@ public class SortValidator extends Confi
     FileInputFormat.setInputPaths(jobConf, sortInput);
     FileInputFormat.addInputPath(jobConf, sortOutput);
 
-    Path outputPath = new Path("/tmp/sortvalidate/recordstatschecker");
+    Path outputPath = new Path(new Path("/tmp",
+        "sortvalidate"), UUID.randomUUID().toString());
     if (defaultfs.exists(outputPath)) {
       defaultfs.delete(outputPath, true);
     }
@@ -365,31 +365,44 @@ public class SortValidator extends Confi
       Date startTime = new Date();
       System.out.println("Job started: " + startTime);
       JobClient.runJob(jobConf);
-      Date end_time = new Date();
-      System.out.println("Job ended: " + end_time);
-      System.out.println("The job took " +
-          (end_time.getTime() - startTime.getTime()) /1000 + " seconds.");
-
-      // Check to ensure that the statistics of the
-      // framework's sort-input and sort-output match
-      SequenceFile.Reader stats = new SequenceFile.Reader(defaultfs,
-          new Path(outputPath, "part-00000"), defaults);
-      IntWritable k1 = new IntWritable();
-      IntWritable k2 = new IntWritable();
-      RecordStatsWritable v1 = new RecordStatsWritable();
-      RecordStatsWritable v2 = new RecordStatsWritable();
-      if (!stats.next(k1, v1)) {
-        throw new IOException("Failed to read record #1 from reduce's output");
-      }
-      if (!stats.next(k2, v2)) {
-        throw new IOException("Failed to read record #2 from reduce's output");
-      }
-
-      if ((v1.getBytes() != v2.getBytes()) || (v1.getRecords() != v2.getRecords()) ||
-          v1.getChecksum() != v2.getChecksum()) {
-        throw new IOException("(" +
-            v1.getBytes() + ", " + v1.getRecords() + ", " + v1.getChecksum() + ") v/s (" +
-            v2.getBytes() + ", " + v2.getRecords() + ", " + v2.getChecksum() + ")");
+      try {
+        Date end_time = new Date();
+        System.out.println("Job ended: " + end_time);
+        System.out.println("The job took " +
+            (end_time.getTime() - startTime.getTime()) /1000 + " seconds.");
+
+        // Check to ensure that the statistics of the
+        // framework's sort-input and sort-output match
+        SequenceFile.Reader stats = new SequenceFile.Reader(defaultfs,
+            new Path(outputPath, "part-00000"), defaults);
+        try {
+          IntWritable k1 = new IntWritable();
+          IntWritable k2 = new IntWritable();
+          RecordStatsWritable v1 = new RecordStatsWritable();
+          RecordStatsWritable v2 = new RecordStatsWritable();
+          if (!stats.next(k1, v1)) {
+            throw new IOException(
+                "Failed to read record #1 from reduce's output");
+          }
+          if (!stats.next(k2, v2)) {
+            throw new IOException(
+                "Failed to read record #2 from reduce's output");
+          }
+
+          if ((v1.getBytes() != v2.getBytes()) ||
+              (v1.getRecords() != v2.getRecords()) ||
+              v1.getChecksum() != v2.getChecksum()) {
+            throw new IOException("(" +
+                v1.getBytes() + ", " + v1.getRecords() + ", " + v1.getChecksum() +
+                ") v/s (" +
+                v2.getBytes() + ", " + v2.getRecords() + ", " + v2.getChecksum() +
+                ")");
+          }
+        } finally {
+          stats.close();
+        }
+      } finally {
+        defaultfs.delete(outputPath, true);
       }
     }

Propchange: hadoop/common/branches/HDFS-3077/hadoop-mapreduce-project/src/webapps/job/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/webapps/job:r1367365-1371513
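
On the SortValidator change above: the fixed /tmp/sortvalidate/recordstatschecker path is replaced by a per-run UUID directory so concurrent runs cannot collide, and the new try/finally blocks guarantee the SequenceFile.Reader is closed and the temporary output deleted even when the comparison fails. A condensed sketch of that cleanup pattern is below; the class and variable names are illustrative, not the actual SortValidator code.

    import java.io.IOException;
    import java.util.UUID;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.SequenceFile;

    // Illustrative sketch of the cleanup pattern; names are examples only.
    public class StatsCheck {
      static void readFirstKey(Configuration conf) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        // Unique per-run directory, mirroring the UUID-based path in the commit.
        Path outputPath = new Path(new Path("/tmp", "sortvalidate"),
            UUID.randomUUID().toString());
        try {
          SequenceFile.Reader stats =
              new SequenceFile.Reader(fs, new Path(outputPath, "part-00000"), conf);
          try {
            IntWritable key = new IntWritable();
            if (!stats.next(key)) {
              throw new IOException("Failed to read record #1");
            }
          } finally {
            stats.close();                 // always release the reader
          }
        } finally {
          fs.delete(outputPath, true);     // always remove the temporary output
        }
      }
    }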