Author: szetszwo
Date: Tue Mar 25 18:09:48 2014
New Revision: 1581440

URL: http://svn.apache.org/r1581440
Log:
svn merge -c 1581437 from trunk for HADOOP-10426. Declare CreateOpts.getOpt(..) 
with generic type argument, removes unused FileContext.getFileStatus(..) and 
fixes various javac warnings.

Modified:
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/   
(props changed)
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
  (contents, props changed)
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/   
(props changed)
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
   (props changed)
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFactory.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java
    
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common:r1581437

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Tue Mar 25 18:09:48 2014
@@ -18,6 +18,10 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10418. SaslRpcClient should not assume that remote principals are in
     the default_realm. (atm)
 
+    HADOOP-10426. Declare CreateOpts.getOpt(..) with generic type argument,
+    removes unused FileContext.getFileStatus(..) and fixes various javac
+    warnings.  (szetszwo)
+
 Release 2.4.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1581437

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src:r1581437

Propchange: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged 
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1581437

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
 Tue Mar 25 18:09:48 2014
@@ -750,13 +750,12 @@ public abstract class AbstractFileSystem
    * Partially resolves the path. This is used during symlink resolution in
    * {@link FSLinkResolver}, and differs from the similarly named method
    * {@link FileContext#getLinkTarget(Path)}.
+   * @throws IOException subclass implementations may throw IOException 
    */
   public Path getLinkTarget(final Path f) throws IOException {
-    /* We should never get here. Any file system that threw an
-     * UnresolvedLinkException, causing this function to be called,
-     * needs to override this method.
-     */
-    throw new AssertionError();
+    throw new AssertionError("Implementation Error: " + getClass()
+        + " that threw an UnresolvedLinkException, causing this method to be"
+        + " called, needs to override this method.");
   }
     
   /**

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
 Tue Mar 25 18:09:48 2014
@@ -33,7 +33,7 @@ import org.apache.hadoop.util.IdentityHa
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class FSDataInputStream extends DataInputStream
-    implements Seekable, PositionedReadable, Closeable, 
+    implements Seekable, PositionedReadable, 
       ByteBufferReadable, HasFileDescriptor, CanSetDropBehind, CanSetReadahead,
       HasEnhancedByteBufferAccess {
   /**
@@ -44,8 +44,7 @@ public class FSDataInputStream extends D
     extendedReadBuffers
       = new IdentityHashStore<ByteBuffer, ByteBufferPool>(0);
 
-  public FSDataInputStream(InputStream in)
-    throws IOException {
+  public FSDataInputStream(InputStream in) {
     super(in);
     if( !(in instanceof Seekable) || !(in instanceof PositionedReadable) ) {
       throw new IllegalArgumentException(

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
 Tue Mar 25 18:09:48 2014
@@ -662,8 +662,7 @@ public final class FileContext {
     // If not, add a default Perms and apply umask;
     // AbstractFileSystem#create
 
-    CreateOpts.Perms permOpt = 
-      (CreateOpts.Perms) CreateOpts.getOpt(CreateOpts.Perms.class, opts);
+    CreateOpts.Perms permOpt = CreateOpts.getOpt(CreateOpts.Perms.class, opts);
     FsPermission permission = (permOpt != null) ? permOpt.getValue() :
                                       FILE_DEFAULT_PERM;
     permission = permission.applyUMask(umask);
@@ -1535,40 +1534,6 @@ public final class FileContext {
     }
     
     /**
-     * Return a list of file status objects that corresponds to supplied paths
-     * excluding those non-existent paths.
-     * 
-     * @param paths list of paths we want information from
-     *
-     * @return a list of FileStatus objects
-     *
-     * @throws AccessControlException If access is denied
-     * @throws IOException If an I/O error occurred
-     * 
-     * Exceptions applicable to file systems accessed over RPC:
-     * @throws RpcClientException If an exception occurred in the RPC client
-     * @throws RpcServerException If an exception occurred in the RPC server
-     * @throws UnexpectedServerException If server implementation throws 
-     *           undeclared exception to RPC server
-     */
-    private FileStatus[] getFileStatus(Path[] paths)
-        throws AccessControlException, IOException {
-      if (paths == null) {
-        return null;
-      }
-      ArrayList<FileStatus> results = new ArrayList<FileStatus>(paths.length);
-      for (int i = 0; i < paths.length; i++) {
-        try {
-          results.add(FileContext.this.getFileStatus(paths[i]));
-        } catch (FileNotFoundException fnfe) {
-          // ignoring 
-        }
-      }
-      return results.toArray(new FileStatus[results.size()]);
-    }
-    
-    
-    /**
      * Return the {@link ContentSummary} of path f.
      * @param f path
      *

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
 Tue Mar 25 18:09:48 2014
@@ -53,8 +53,7 @@ public abstract class FilterFs extends A
     return myFs;
   }
   
-  protected FilterFs(AbstractFileSystem fs) throws IOException,
-      URISyntaxException {
+  protected FilterFs(AbstractFileSystem fs) throws URISyntaxException {
     super(fs.getUri(), fs.getUri().getScheme(),
         fs.getUri().getAuthority() != null, fs.getUriDefaultPort());
     myFs = fs;

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
 Tue Mar 25 18:09:48 2014
@@ -150,21 +150,25 @@ public final class Options {
     
     /**
      * Get an option of desired type
-     * @param theClass is the desired class of the opt
+     * @param clazz is the desired class of the opt
      * @param opts - not null - at least one opt must be passed
      * @return an opt from one of the opts of type theClass.
      *   returns null if there isn't any
      */
-    protected static CreateOpts getOpt(Class<? extends CreateOpts> theClass,  CreateOpts ...opts) {
+    static <T extends CreateOpts> T getOpt(Class<T> clazz, CreateOpts... opts) {
       if (opts == null) {
         throw new IllegalArgumentException("Null opt");
       }
-      CreateOpts result = null;
+      T result = null;
       for (int i = 0; i < opts.length; ++i) {
-        if (opts[i].getClass() == theClass) {
-          if (result != null) 
-            throw new IllegalArgumentException("multiple blocksize varargs");
-          result = opts[i];
+        if (opts[i].getClass() == clazz) {
+          if (result != null) {
+            throw new IllegalArgumentException("multiple opts varargs: " + clazz);
+          }
+
+          @SuppressWarnings("unchecked")
+          T t = (T)opts[i];
+          result = t;
         }
       }
       return result;
@@ -175,14 +179,16 @@ public final class Options {
      * @param opts  - the option is set into this array of opts
      * @return updated CreateOpts[] == opts + newValue
      */
-    protected static <T extends CreateOpts> CreateOpts[] setOpt(T newValue,
-        CreateOpts ...opts) {
+    static <T extends CreateOpts> CreateOpts[] setOpt(final T newValue,
+        final CreateOpts... opts) {
+      final Class<?> clazz = newValue.getClass();
       boolean alreadyInOpts = false;
       if (opts != null) {
         for (int i = 0; i < opts.length; ++i) {
-          if (opts[i].getClass() == newValue.getClass()) {
-            if (alreadyInOpts) 
-              throw new IllegalArgumentException("multiple opts varargs");
+          if (opts[i].getClass() == clazz) {
+            if (alreadyInOpts) {
+              throw new IllegalArgumentException("multiple opts varargs: " + clazz);
+            }
             alreadyInOpts = true;
             opts[i] = newValue;
           }
@@ -190,9 +196,12 @@ public final class Options {
       }
       CreateOpts[] resultOpt = opts;
       if (!alreadyInOpts) { // no newValue in opt
-        CreateOpts[] newOpts = new CreateOpts[opts.length + 1];
-        System.arraycopy(opts, 0, newOpts, 0, opts.length);
-        newOpts[opts.length] = newValue;
+        final int oldLength = opts == null? 0: opts.length;
+        CreateOpts[] newOpts = new CreateOpts[oldLength + 1];
+        if (oldLength > 0) {
+          System.arraycopy(opts, 0, newOpts, 0, oldLength);
+        }
+        newOpts[oldLength] = newValue;
         resultOpt = newOpts;
       }
       return resultOpt;
@@ -273,50 +282,29 @@ public final class Options {
      */
     public static ChecksumOpt processChecksumOpt(ChecksumOpt defaultOpt, 
         ChecksumOpt userOpt, int userBytesPerChecksum) {
-      // The following is done to avoid unnecessary creation of new objects.
-      // tri-state variable: 0 default, 1 userBytesPerChecksum, 2 userOpt
-      short whichSize;
-      // true default, false userOpt
-      boolean useDefaultType;
-      
-      //  bytesPerChecksum - order of preference
-      //    user specified value in bytesPerChecksum
-      //    user specified value in checksumOpt
-      //    default.
-      if (userBytesPerChecksum > 0) {
-        whichSize = 1; // userBytesPerChecksum
-      } else if (userOpt != null && userOpt.getBytesPerChecksum() > 0) {
-        whichSize = 2; // userOpt
-      } else {
-        whichSize = 0; // default
-      }
-
-      // checksum type - order of preference
-      //   user specified value in checksumOpt
-      //   default.
-      if (userOpt != null &&
-            userOpt.getChecksumType() != DataChecksum.Type.DEFAULT) {
+      final boolean useDefaultType;
+      final DataChecksum.Type type;
+      if (userOpt != null 
+          && userOpt.getChecksumType() != DataChecksum.Type.DEFAULT) {
         useDefaultType = false;
+        type = userOpt.getChecksumType();
       } else {
         useDefaultType = true;
+        type = defaultOpt.getChecksumType();
       }
 
-      // Short out the common and easy cases
-      if (whichSize == 0 && useDefaultType) {
-        return defaultOpt;
-      } else if (whichSize == 2 && !useDefaultType) {
-        return userOpt;
-      }
-
-      // Take care of the rest of combinations
-      DataChecksum.Type type = useDefaultType ? defaultOpt.getChecksumType() :
-          userOpt.getChecksumType();
-      if (whichSize == 0) {
-        return new ChecksumOpt(type, defaultOpt.getBytesPerChecksum());
-      } else if (whichSize == 1) {
+      //  bytesPerChecksum - order of preference
+      //    user specified value in bytesPerChecksum
+      //    user specified value in checksumOpt
+      //    default.
+      if (userBytesPerChecksum > 0) {
         return new ChecksumOpt(type, userBytesPerChecksum);
+      } else if (userOpt != null && userOpt.getBytesPerChecksum() > 0) {
+        return !useDefaultType? userOpt
+            : new ChecksumOpt(type, userOpt.getBytesPerChecksum());
       } else {
-        return new ChecksumOpt(type, userOpt.getBytesPerChecksum());
+        return useDefaultType? defaultOpt
+            : new ChecksumOpt(type, defaultOpt.getBytesPerChecksum());
       }
     }
 

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFactory.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFactory.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFactory.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFactory.java
 Tue Mar 25 18:09:48 2014
@@ -24,7 +24,6 @@ import java.util.Map;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -35,7 +34,7 @@ import org.apache.hadoop.util.StringUtil
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 
-public class CommandFactory extends Configured implements Configurable {
+public class CommandFactory extends Configured {
   private Map<String, Class<? extends Command>> classMap =
     new HashMap<String, Class<? extends Command>>();
 

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
 Tue Mar 25 18:09:48 2014
@@ -18,16 +18,20 @@
 
 package org.apache.hadoop.fs.shell;
 
-import java.io.*;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsDirectoryException;
 import org.apache.hadoop.io.IOUtils;
 
 /** Various commands for copy files */

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
 Tue Mar 25 18:09:48 2014
@@ -18,12 +18,12 @@
 package org.apache.hadoop.fs.shell;
 
 import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.InputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.util.LinkedList;
 import java.util.zip.GZIPInputStream;
 
+import org.apache.avro.Schema;
 import org.apache.avro.file.DataFileReader;
 import org.apache.avro.file.FileReader;
 import org.apache.avro.generic.GenericDatumReader;
@@ -31,7 +31,6 @@ import org.apache.avro.generic.GenericDa
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.EncoderFactory;
 import org.apache.avro.io.JsonEncoder;
-import org.apache.avro.Schema;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -252,7 +251,7 @@ class Display extends FsCommand {
     private int pos;
     private byte[] buffer;
     private ByteArrayOutputStream output;
-    private FileReader fileReader;
+    private FileReader<?> fileReader;
     private DatumWriter<Object> writer;
     private JsonEncoder encoder;
 

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
 Tue Mar 25 18:09:48 2014
@@ -570,7 +570,7 @@ public class PathData implements Compara
 
   @Override
   public int compareTo(PathData o) {
-    return path.compareTo(((PathData)o).path);
+    return path.compareTo(o.path);
   }
   
   @Override

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
 Tue Mar 25 18:09:48 2014
@@ -1091,12 +1091,7 @@ public class ActiveStandbyElector implem
   }
 
   private static boolean shouldRetry(Code code) {
-    switch (code) {
-    case CONNECTIONLOSS:
-    case OPERATIONTIMEOUT:
-      return true;
-    }
-    return false;
+    return code == Code.CONNECTIONLOSS || code == Code.OPERATIONTIMEOUT;
   }
   
   @Override

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
 Tue Mar 25 18:09:48 2014
@@ -19,8 +19,6 @@ package org.apache.hadoop.http;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 
 /**
  * Singleton to get access to Http related configuration.

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
 Tue Mar 25 18:09:48 2014
@@ -434,7 +434,7 @@ public final class HttpServer2 implement
    * provided. This wrapper and all subclasses must create at least one
    * listener.
    */
-  public Connector createBaseListener(Configuration conf) throws IOException {
+  public Connector createBaseListener(Configuration conf) {
     return HttpServer2.createDefaultChannelConnector();
   }
 
@@ -527,8 +527,7 @@ public final class HttpServer2 implement
     addServlet("conf", "/conf", ConfServlet.class);
   }
 
-  public void addContext(Context ctxt, boolean isFiltered)
-      throws IOException {
+  public void addContext(Context ctxt, boolean isFiltered) {
     webServer.addHandler(ctxt);
     addNoCacheFilter(webAppContext);
     defaultContexts.put(ctxt, isFiltered);

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
 Tue Mar 25 18:09:48 2014
@@ -48,11 +48,11 @@ public abstract class AbstractMapWritabl
   
   /* Class to id mappings */
   @VisibleForTesting
-  Map<Class, Byte> classToIdMap = new ConcurrentHashMap<Class, Byte>();
+  Map<Class<?>, Byte> classToIdMap = new ConcurrentHashMap<Class<?>, Byte>();
   
   /* Id to Class mappings */
   @VisibleForTesting
-  Map<Byte, Class> idToClassMap = new ConcurrentHashMap<Byte, Class>();
+  Map<Byte, Class<?>> idToClassMap = new ConcurrentHashMap<Byte, Class<?>>();
   
   /* The number of new classes (those not established by the constructor) */
   private volatile byte newClasses = 0;
@@ -65,7 +65,7 @@ public abstract class AbstractMapWritabl
   /**
    * Used to add "predefined" classes and by Writable to copy "new" classes.
    */
-  private synchronized void addToMap(Class clazz, byte id) {
+  private synchronized void addToMap(Class<?> clazz, byte id) {
     if (classToIdMap.containsKey(clazz)) {
       byte b = classToIdMap.get(clazz);
       if (b != id) {
@@ -74,7 +74,7 @@ public abstract class AbstractMapWritabl
       }
     }
     if (idToClassMap.containsKey(id)) {
-      Class c = idToClassMap.get(id);
+      Class<?> c = idToClassMap.get(id);
       if (!c.equals(clazz)) {
        throw new IllegalArgumentException("Id " + id + " exists but maps to " +
             c.getName() + " and not " + clazz.getName());
@@ -85,7 +85,7 @@ public abstract class AbstractMapWritabl
   }
   
   /** Add a Class to the maps if it is not already present. */ 
-  protected synchronized void addToMap(Class clazz) {
+  protected synchronized void addToMap(Class<?> clazz) {
     if (classToIdMap.containsKey(clazz)) {
       return;
     }
@@ -98,12 +98,12 @@ public abstract class AbstractMapWritabl
   }
 
   /** @return the Class class for the specified id */
-  protected Class getClass(byte id) {
+  protected Class<?> getClass(byte id) {
     return idToClassMap.get(id);
   }
 
   /** @return the id for the specified Class */
-  protected byte getId(Class clazz) {
+  protected byte getId(Class<?> clazz) {
     return classToIdMap.containsKey(clazz) ? classToIdMap.get(clazz) : -1;
   }
 

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
 Tue Mar 25 18:09:48 2014
@@ -82,7 +82,7 @@ public class MapWritable extends Abstrac
     }
 
     if (obj instanceof MapWritable) {
-      Map map = (Map) obj;
+      MapWritable map = (MapWritable) obj;
       if (size() != map.size()) {
         return false;
       }
@@ -114,7 +114,6 @@ public class MapWritable extends Abstrac
   }
 
   @Override
-  @SuppressWarnings("unchecked")
   public Writable put(Writable key, Writable value) {
     addToMap(key.getClass());
     addToMap(value.getClass());
@@ -163,7 +162,6 @@ public class MapWritable extends Abstrac
     }
   }
 
-  @SuppressWarnings("unchecked")
   @Override
   public void readFields(DataInput in) throws IOException {
     super.readFields(in);

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
 Tue Mar 25 18:09:48 2014
@@ -77,8 +77,7 @@ public final class FileContextTestHelper
   // the getAbsolutexxx method is needed because the root test dir
   // can be messed up by changing the working dir.
 
-  public String getAbsoluteTestRootDir(FileContext fc)
-      throws IOException {
+  public String getAbsoluteTestRootDir(FileContext fc) {
     if (absTestRootDir == null) {
       if (new Path(testRootDir).isAbsolute()) {
         absTestRootDir = testRootDir;
@@ -90,12 +89,11 @@ public final class FileContextTestHelper
     return absTestRootDir;
   }
   
-  public Path getAbsoluteTestRootPath(FileContext fc) throws IOException {
+  public Path getAbsoluteTestRootPath(FileContext fc) {
     return fc.makeQualified(new Path(getAbsoluteTestRootDir(fc)));
   }
 
-  public Path getDefaultWorkingDirectory(FileContext fc)
-      throws IOException {
+  public Path getDefaultWorkingDirectory(FileContext fc) {
     return getTestRootPath(fc, "/user/" + System.getProperty("user.name"))
         .makeQualified(fc.getDefaultFileSystem().getUri(),
             fc.getWorkingDirectory());
@@ -106,8 +104,7 @@ public final class FileContextTestHelper
    */
   public static long createFile(FileContext fc, Path path, int numBlocks,
       CreateOpts... options) throws IOException {
-    BlockSize blockSizeOpt = 
-      (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, 
options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out = 
@@ -146,8 +143,7 @@ public final class FileContextTestHelper
 
   public static void appendToFile(FileContext fc, Path path, int numBlocks,
       CreateOpts... options) throws IOException {
-    BlockSize blockSizeOpt =
-      (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, 
options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out;
@@ -203,14 +199,11 @@ public final class FileContextTestHelper
   }
 
   public FileStatus containsPath(FileContext fc, Path path,
-      FileStatus[] dirList)
-    throws IOException {
+      FileStatus[] dirList) {
     return containsPath(getTestRootPath(fc, path.toString()), dirList);
   }
   
-  public static FileStatus containsPath(Path path,
-      FileStatus[] dirList)
-    throws IOException {
+  public static FileStatus containsPath(Path path, FileStatus[] dirList) {
     for(int i = 0; i < dirList.length; i ++) { 
       if (path.equals(dirList[i].getPath()))
         return dirList[i];
@@ -219,8 +212,7 @@ public final class FileContextTestHelper
   }
   
   public FileStatus containsPath(FileContext fc, String path,
-      FileStatus[] dirList)
-     throws IOException {
+      FileStatus[] dirList) {
     return containsPath(fc, new Path(path), dirList);
   }
   

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java
 Tue Mar 25 18:09:48 2014
@@ -62,8 +62,7 @@ public final class FileContextTestWrappe
    */
   public long createFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-      (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, 
options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out =
@@ -100,8 +99,7 @@ public final class FileContextTestWrappe
 
   public void appendToFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-      (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, 
options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out;

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java?rev=1581440&r1=1581439&r2=1581440&view=diff
==============================================================================
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java
 Tue Mar 25 18:09:48 2014
@@ -63,8 +63,7 @@ public final class FileSystemTestWrapper
    */
   public long createFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-      (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, 
options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out =
@@ -101,8 +100,7 @@ public final class FileSystemTestWrapper
 
   public void appendToFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-      (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, 
options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out;
@@ -261,7 +259,7 @@ public final class FileSystemTestWrapper
     // Need to translate the FileContext-style options into FileSystem-style
 
     // Permissions with umask
-    CreateOpts.Perms permOpt = (CreateOpts.Perms) CreateOpts.getOpt(
+    CreateOpts.Perms permOpt = CreateOpts.getOpt(
         CreateOpts.Perms.class, opts);
     FsPermission umask = FsPermission.getUMask(fs.getConf());
     FsPermission permission = (permOpt != null) ? permOpt.getValue()
@@ -273,23 +271,22 @@ public final class FileSystemTestWrapper
     int bufferSize = fs.getConf().getInt(
         CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY,
         CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT);
-    CreateOpts.BufferSize bufOpt = (CreateOpts.BufferSize) CreateOpts.getOpt(
+    CreateOpts.BufferSize bufOpt = CreateOpts.getOpt(
         CreateOpts.BufferSize.class, opts);
     bufferSize = (bufOpt != null) ? bufOpt.getValue() : bufferSize;
     // replication
     short replication = fs.getDefaultReplication(f);
     CreateOpts.ReplicationFactor repOpt =
-        (CreateOpts.ReplicationFactor) CreateOpts.getOpt(
-            CreateOpts.ReplicationFactor.class, opts);
+        CreateOpts.getOpt(CreateOpts.ReplicationFactor.class, opts);
     replication = (repOpt != null) ? repOpt.getValue() : replication;
     // blockSize
     long blockSize = fs.getDefaultBlockSize(f);
-    CreateOpts.BlockSize blockOpt = (CreateOpts.BlockSize) CreateOpts.getOpt(
+    CreateOpts.BlockSize blockOpt = CreateOpts.getOpt(
         CreateOpts.BlockSize.class, opts);
     blockSize = (blockOpt != null) ? blockOpt.getValue() : blockSize;
     // Progressable
     Progressable progress = null;
-    CreateOpts.Progress progressOpt = (CreateOpts.Progress) CreateOpts.getOpt(
+    CreateOpts.Progress progressOpt = CreateOpts.getOpt(
         CreateOpts.Progress.class, opts);
     progress = (progressOpt != null) ? progressOpt.getValue() : progress;
     return fs.create(f, permission, overwrite, bufferSize, replication,


Reply via email to