Author: vinayakumarb
Date: Wed Jun 18 05:09:28 2014
New Revision: 1603355

URL: http://svn.apache.org/r1603355
Log:
Merged latest changes from trunk

Modified:
    hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/   (props changed)
    hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
    hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java
    hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
    hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
    hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/join/CompositeRecordReader.java
    hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/join/WrappedRecordReader.java
    hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/CompositeRecordReader.java
    hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/WrappedRecordReader.java
    hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/markdown/DistCp.md.vm

Propchange: hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project:r1601869-1603354

Modified: hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/CHANGES.txt?rev=1603355&r1=1603354&r2=1603355&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/CHANGES.txt Wed Jun 18 05:09:28 2014
@@ -77,6 +77,9 @@ Trunk (Unreleased)
     MAPREDUCE-5196. Add bookkeeping for managing checkpoints of task state.
     (Carlo Curino via cdouglas)
 
+    MAPREDUCE-5912. Task.calculateOutputSize does not handle Windows files after
+    MAPREDUCE-5196. (Remus Rusanu via cnauroth)
+
   BUG FIXES
 
     MAPREDUCE-5714. Removed forceful JVM exit in shutDownJob.  
@@ -142,9 +145,6 @@ Trunk (Unreleased)
     MAPREDUCE-5867. Fix NPE in KillAMPreemptionPolicy related to 
     ProportionalCapacityPreemptionPolicy (Sunil G via devaraj)
 
-    MAPREDUCE-5898. distcp to support preserving HDFS extended attributes(XAttrs)
-    (Yi Liu via umamahesh)
-
 Release 2.5.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -259,7 +259,15 @@ Release 2.5.0 - UNRELEASED
     MAPREDUCE-5777. Support utf-8 text with Byte Order Marker.
     (Zhihai Xu via kasha)
 
-Release 2.4.1 - UNRELEASED
+    MAPREDUCE-5898. distcp to support preserving HDFS extended attributes(XAttrs)
+    (Yi Liu via umamahesh)
+
+    MAPREDUCE-5920. Add Xattr option in DistCp docs. (Yi Liu via cnauroth)
+
+    MAPREDUCE-5924. Changed TaskAttemptImpl to ignore TA_COMMIT_PENDING event
+    at COMMIT_PENDING state. (Zhijie Shen via jianhe)
+
+Release 2.4.1 - 2014-06-23 
 
   INCOMPATIBLE CHANGES
 

Propchange: hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:r1601869-1603354

Modified: hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java?rev=1603355&r1=1603354&r2=1603355&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java Wed Jun 18 05:09:28 2014
@@ -335,6 +335,15 @@ public abstract class TaskAttemptImpl im
      .addTransition(TaskAttemptStateInternal.COMMIT_PENDING,
          TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP,
          TaskAttemptEventType.TA_TIMED_OUT, CLEANUP_CONTAINER_TRANSITION)
+     // AM is likely to receive duplicate TA_COMMIT_PENDINGs as the task attempt
+     // will re-send the commit message until it doesn't encounter any
+     // IOException and succeeds in delivering the commit message.
+     // Ignoring the duplicate commit message is a short-term fix. In long term,
+     // we need to make use of retry cache to help this and other MR protocol
+     // APIs that can be considered as @AtMostOnce.
+     .addTransition(TaskAttemptStateInternal.COMMIT_PENDING,
+         TaskAttemptStateInternal.COMMIT_PENDING,
+         TaskAttemptEventType.TA_COMMIT_PENDING)
 
      // Transitions from SUCCESS_CONTAINER_CLEANUP state
      // kill and cleanup the container

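Aside on the transition added above: the self-loop (COMMIT_PENDING back to COMMIT_PENDING on TA_COMMIT_PENDING) is what turns a duplicate event into a no-op. The sketch below is only a minimal, self-contained illustration of that idea, not Hadoop's StateMachineFactory API; the names (AttemptState, AttemptEvent, handle) are invented for the example.

    // Minimal sketch (invented names): a duplicate TA_COMMIT_PENDING
    // leaves the attempt in COMMIT_PENDING.
    enum AttemptState { RUNNING, COMMIT_PENDING, SUCCEEDED }
    enum AttemptEvent { TA_COMMIT_PENDING, TA_DONE }

    class AttemptStateMachine {
      private AttemptState state = AttemptState.RUNNING;

      AttemptState handle(AttemptEvent event) {
        switch (state) {
          case RUNNING:
            if (event == AttemptEvent.TA_COMMIT_PENDING) {
              state = AttemptState.COMMIT_PENDING;
            }
            break;
          case COMMIT_PENDING:
            if (event == AttemptEvent.TA_DONE) {
              state = AttemptState.SUCCEEDED;
            }
            // TA_COMMIT_PENDING here is a duplicate commit message: stay put.
            break;
          default:
            break;
        }
        return state;
      }
    }
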
Modified: hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java?rev=1603355&r1=1603354&r2=1603355&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java Wed Jun 18 05:09:28 2014
@@ -112,6 +112,15 @@ public class TestMRApp {
     //wait for first attempt to commit pending
     app.waitForState(attempt, TaskAttemptState.COMMIT_PENDING);
 
+    //re-send the commit pending signal to the task
+    app.getContext().getEventHandler().handle(
+        new TaskAttemptEvent(
+            attempt.getID(),
+            TaskAttemptEventType.TA_COMMIT_PENDING));
+
+    //the task attempt should be still at COMMIT_PENDING
+    app.waitForState(attempt, TaskAttemptState.COMMIT_PENDING);
+
     //send the done signal to the task
     app.getContext().getEventHandler().handle(
         new TaskAttemptEvent(

Modified: hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java?rev=1603355&r1=1603354&r2=1603355&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java Wed Jun 18 05:09:28 2014
@@ -112,7 +112,7 @@ import org.apache.log4j.Level;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class JobConf extends Configuration {
-  
+
   private static final Log LOG = LogFactory.getLog(JobConf.class);
 
   static{
@@ -882,7 +882,7 @@ public class JobConf extends Configurati
       JobContext.KEY_COMPARATOR, null, RawComparator.class);
     if (theClass != null)
       return ReflectionUtils.newInstance(theClass, this);
-    return WritableComparator.get(getMapOutputKeyClass().asSubclass(WritableComparable.class));
+    return WritableComparator.get(getMapOutputKeyClass().asSubclass(WritableComparable.class), this);
   }
 
   /**

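The one-line change above (mirrored in the join record readers further down) switches from WritableComparator.get(Class) to the overload that also takes a Configuration, so a comparator that implements Configurable is initialized with the job settings. A minimal usage sketch, assuming Hadoop 2.5-era APIs, with Text chosen purely as an example key class:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.WritableComparator;

    public class ComparatorLookup {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Pass the configuration so a Configurable comparator picks it up.
        WritableComparator cmp = WritableComparator.get(Text.class, conf);
        System.out.println(cmp.compare(new Text("alpha"), new Text("beta"))); // negative
      }
    }
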
Modified: hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java?rev=1603355&r1=1603354&r2=1603355&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java Wed Jun 18 05:09:28 2014
@@ -1120,8 +1120,8 @@ abstract public class Task implements Wr
     if (isMapTask() && conf.getNumReduceTasks() > 0) {
       try {
         Path mapOutput =  mapOutputFile.getOutputFile();
-        FileSystem fs = mapOutput.getFileSystem(conf);
-        return fs.getFileStatus(mapOutput).getLen();
+        FileSystem localFS = FileSystem.getLocal(conf);
+        return localFS.getFileStatus(mapOutput).getLen();
       } catch (IOException e) {
         LOG.warn ("Could not find output size " , e);
       }

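For context on the Task.java change above (MAPREDUCE-5912): the intermediate map output is always a local file, so the fix sizes it through the local FileSystem instead of deriving a FileSystem from the path, which reportedly misbehaved with Windows-style local paths. A small standalone sketch of the same call pattern, assuming standard Hadoop FileSystem APIs; the file path argument is a placeholder:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class LocalFileSize {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Resolve against the local filesystem explicitly, as the fix does,
        // rather than calling path.getFileSystem(conf) on a local path.
        FileSystem localFS = FileSystem.getLocal(conf);
        Path file = new Path(args[0]);
        System.out.println(localFS.getFileStatus(file).getLen());
      }
    }
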
Modified: hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/join/CompositeRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/join/CompositeRecordReader.java?rev=1603355&r1=1603354&r2=1603355&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/join/CompositeRecordReader.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/join/CompositeRecordReader.java Wed Jun 18 05:09:28 2014
@@ -131,7 +131,7 @@ public abstract class CompositeRecordRea
   public void add(ComposableRecordReader<K,? extends V> rr) throws IOException {
     kids[rr.id()] = rr;
     if (null == q) {
-      cmp = WritableComparator.get(rr.createKey().getClass());
+      cmp = WritableComparator.get(rr.createKey().getClass(), conf);
       q = new PriorityQueue<ComposableRecordReader<K,?>>(3,
           new Comparator<ComposableRecordReader<K,?>>() {
             public int compare(ComposableRecordReader<K,?> o1,

Modified: hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/join/WrappedRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/join/WrappedRecordReader.java?rev=1603355&r1=1603354&r2=1603355&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/join/WrappedRecordReader.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/join/WrappedRecordReader.java Wed Jun 18 05:09:28 2014
@@ -22,6 +22,8 @@ import java.io.IOException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableComparator;
@@ -38,7 +40,7 @@ import org.apache.hadoop.mapred.RecordRe
 @InterfaceStability.Stable
 public class WrappedRecordReader<K extends WritableComparable,
                           U extends Writable>
-    implements ComposableRecordReader<K,U> {
+    implements ComposableRecordReader<K,U>, Configurable {
 
   private boolean empty = false;
   private RecordReader<K,U> rr;
@@ -47,6 +49,7 @@ public class WrappedRecordReader<K exten
   private K khead; // key at the top of this RR
   private U vhead; // value assoc with khead
   private WritableComparator cmp;
+  private Configuration conf;
 
   private ResetableIterator<U> vjoin;
 
@@ -55,13 +58,20 @@ public class WrappedRecordReader<K exten
    */
   WrappedRecordReader(int id, RecordReader<K,U> rr,
       Class<? extends WritableComparator> cmpcl) throws IOException {
+    this(id, rr, cmpcl, null);
+  }
+
+  WrappedRecordReader(int id, RecordReader<K,U> rr,
+                      Class<? extends WritableComparator> cmpcl,
+                      Configuration conf) throws IOException {
     this.id = id;
     this.rr = rr;
+    this.conf = (conf == null) ? new Configuration() : conf;
     khead = rr.createKey();
     vhead = rr.createValue();
     try {
       cmp = (null == cmpcl)
-        ? WritableComparator.get(khead.getClass())
+        ? WritableComparator.get(khead.getClass(), this.conf)
         : cmpcl.newInstance();
     } catch (InstantiationException e) {
       throw (IOException)new IOException().initCause(e);
@@ -207,4 +217,13 @@ public class WrappedRecordReader<K exten
     return 42;
   }
 
+  @Override
+  public void setConf(Configuration conf) {
+    this.conf = conf;
+  }
+
+  @Override
+  public Configuration getConf() {
+    return conf;
+  }
 }

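The hunks above wire WrappedRecordReader into the standard Configurable contract: accept a Configuration (falling back to a fresh one when none is supplied), keep it, and hand it to WritableComparator.get. Stripped of the record-reader details, the pattern looks roughly like this hypothetical component (a sketch, not code from the commit):

    import org.apache.hadoop.conf.Configurable;
    import org.apache.hadoop.conf.Configuration;

    public class ConfiguredComponent implements Configurable {
      private Configuration conf;

      public ConfiguredComponent(Configuration conf) {
        // Fall back to a default Configuration when none is supplied.
        this.conf = (conf == null) ? new Configuration() : conf;
      }

      @Override
      public void setConf(Configuration conf) {
        this.conf = conf;
      }

      @Override
      public Configuration getConf() {
        return conf;
      }
    }
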
Modified: hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/CompositeRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/CompositeRecordReader.java?rev=1603355&r1=1603354&r2=1603355&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/CompositeRecordReader.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/CompositeRecordReader.java Wed Jun 18 05:09:28 2014
@@ -109,7 +109,7 @@ public abstract class CompositeRecordRea
         }
         // create priority queue
         if (null == q) {
-          cmp = WritableComparator.get(keyclass);
+          cmp = WritableComparator.get(keyclass, conf);
           q = new PriorityQueue<ComposableRecordReader<K,?>>(3,
                 new Comparator<ComposableRecordReader<K,?>>() {
                   public int compare(ComposableRecordReader<K,?> o1,

Modified: hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/WrappedRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/WrappedRecordReader.java?rev=1603355&r1=1603354&r2=1603355&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/WrappedRecordReader.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/WrappedRecordReader.java Wed Jun 18 05:09:28 2014
@@ -92,7 +92,7 @@ public class WrappedRecordReader<K exten
       keyclass = key.getClass().asSubclass(WritableComparable.class);
       valueclass = value.getClass();
       if (cmp == null) {
-        cmp = WritableComparator.get(keyclass);
+        cmp = WritableComparator.get(keyclass, conf);
       }
     }
   }

Modified: hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/markdown/DistCp.md.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/markdown/DistCp.md.vm?rev=1603355&r1=1603354&r2=1603355&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/markdown/DistCp.md.vm (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/markdown/DistCp.md.vm Wed Jun 18 05:09:28 2014
@@ -196,7 +196,7 @@ Command Line Options
 
 Flag              | Description                          | Notes
 ----------------- | ------------------------------------ | --------
-`-p[rbugpca]` | Preserve r: replication number b: block size u: user g: group p: permission c: checksum-type a: ACL | Modification times are not preserved. Also, when `-update` is specified, status updates will **not** be synchronized unless the file sizes also differ (i.e. unless the file is re-created). If -pa is specified, DistCp preserves the permissions also because ACLs are a super-set of permissions.
+`-p[rbugpcax]` | Preserve r: replication number b: block size u: user g: group p: permission c: checksum-type a: ACL x: XAttr | Modification times are not preserved. Also, when `-update` is specified, status updates will **not** be synchronized unless the file sizes also differ (i.e. unless the file is re-created). If -pa is specified, DistCp preserves the permissions also because ACLs are a super-set of permissions.
 `-i` | Ignore failures | As explained in the Appendix, this option will keep more accurate statistics about the copy than the default case. It also preserves logs from failed copies, which can be valuable for debugging. Finally, a failing map will not cause the job to fail before all splits are attempted.
 `-log <logdir>` | Write logs to \<logdir\> | DistCp keeps logs of each file it attempts to copy as map output. If a map fails, the log output will not be retained if it is re-executed.
 `-m <num_maps>` | Maximum number of simultaneous copies | Specify the number of maps to copy data. Note that more maps may not necessarily improve throughput.

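On the documented flag change above: with the new x letter, a DistCp run that preserves user, group, permissions and extended attributes could be invoked roughly as shown below. The hosts and paths are placeholders for illustration, not taken from the commit.

    hadoop distcp -pugpx hdfs://nn1:8020/source hdfs://nn2:8020/target
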
