Repository: hadoop
Updated Branches:
  refs/heads/trunk f261c3193 -> 7b57f2f71


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
index bec0fe1..487a457 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
@@ -168,7 +168,7 @@ public class NetworkTopologyWithNodeGroup extends 
NetworkTopology {
   }
 
   /** Add a leaf node
-   * Update node counter & rack counter if necessary
+   * Update node counter &amp;amp; rack counter if necessary
    * @param node node to be added; can be null
    * @exception IllegalArgumentException if add a node to a leave 
    *                                     or node to be added is not a leaf
@@ -272,7 +272,7 @@ public class NetworkTopologyWithNodeGroup extends 
NetworkTopology {
 
   /**
    * Sort nodes array by their distances to <i>reader</i>.
-   * <p/>
+   * <p>
    * This is the same as {@link NetworkTopology#sortByDistance(Node, Node[],
    * int)} except with a four-level network topology which contains the
    * additional network distance of a "node group" which is between local and

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
index 02b44a5..4db8155 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
@@ -33,13 +33,13 @@ import org.slf4j.LoggerFactory;
  * This class implements the {@link DNSToSwitchMapping} interface using a 
  * script configured via the
  * {@link CommonConfigurationKeys#NET_TOPOLOGY_SCRIPT_FILE_NAME_KEY} option.
- * <p/>
+ * <p>
  * It contains a static class <code>RawScriptBasedMapping</code> that performs
  * the work: reading the configuration parameters, executing any defined
  * script, handling errors and such like. The outer
  * class extends {@link CachedDNSToSwitchMapping} to cache the delegated
  * queries.
- * <p/>
+ * <p>
  * This DNS mapper's {@link #isSingleSwitch()} predicate returns
  * true if and only if a script is defined.
  */
@@ -78,7 +78,7 @@ public class ScriptBasedMapping extends 
CachedDNSToSwitchMapping {
 
   /**
    * Create an instance with the default configuration.
-   * </p>
+   * <p>
    * Calling {@link #setConf(Configuration)} will trigger a
    * re-evaluation of the configuration settings and so be used to
    * set up the mapping script.
@@ -125,7 +125,7 @@ public class ScriptBasedMapping extends 
CachedDNSToSwitchMapping {
 
   /**
    * {@inheritDoc}
-   * <p/>
+   * <p>
    * This will get called in the superclass constructor, so a check is needed
    * to ensure that the raw mapping is defined before trying to relaying a null
    * configuration.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
index 086650b..e05fae6 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
@@ -32,10 +32,9 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
  * the {@link DNSToSwitchMappingWithDependency} interface using 
  * a script configured via the 
  * {@link CommonConfigurationKeys#NET_DEPENDENCY_SCRIPT_FILE_NAME_KEY} option.
- * <p/>
+ * <p>
  * It contains a static class <code>RawScriptBasedMappingWithDependency</code>
  * that performs the getDependency work.
- * <p/>
  */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
@@ -52,7 +51,7 @@ public class ScriptBasedMappingWithDependency  extends 
ScriptBasedMapping
 
   /**
    * Create an instance with the default configuration.
-   * </p>
+   * <p>
    * Calling {@link #setConf(Configuration)} will trigger a
    * re-evaluation of the configuration settings and so be used to
    * set up the mapping script.
@@ -76,7 +75,7 @@ public class ScriptBasedMappingWithDependency  extends 
ScriptBasedMapping
 
   /**
    * {@inheritDoc}
-   * <p/>
+   * <p>
    * This will get called in the superclass constructor, so a check is needed
    * to ensure that the raw mapping is defined before trying to relaying a null
    * configuration.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java
index ead1d7b2..93f4f56 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java
@@ -32,7 +32,6 @@ import java.nio.channels.WritableByteChannel;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.metrics2.lib.MutableRate;
 
 /**
  * This implements an output stream that can have a timeout while writing.
@@ -187,7 +186,7 @@ public class SocketOutputStream extends OutputStream
    * @param count number of bytes to transfer.
    * @param waitForWritableTime nanoseconds spent waiting for the socket 
    *        to become writable
-   * @param transferTime nanoseconds spent transferring data
+   * @param transferToTime nanoseconds spent transferring data
    * 
    * @throws EOFException 
    *         If end of input file is reached before requested number of 
@@ -253,7 +252,8 @@ public class SocketOutputStream extends OutputStream
 
   /**
    * Call
-   * {@link #transferToFully(FileChannel, long, int, MutableRate, MutableRate)}
+   * {@link #transferToFully(FileChannel, long, int, LongWritable, 
LongWritable)
+   * }
    * with null <code>waitForWritableTime</code> and <code>transferToTime</code>
    */
   public void transferToFully(FileChannel fileCh, long position, int count)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java
index 7e6b3a8..b25d5d7 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java
@@ -31,11 +31,11 @@ import java.util.Map;
 /**
  * Initializes hadoop-auth AuthenticationFilter which provides support for
  * Kerberos HTTP SPNEGO authentication.
- * <p/>
+ * <p>
  * It enables anonymous access, simple/speudo and Kerberos HTTP SPNEGO
  * authentication  for Hadoop JobTracker, NameNode, DataNodes and
  * TaskTrackers.
- * <p/>
+ * <p>
  * Refer to the <code>core-default.xml</code> file, after the comment
  * 'HTTP Authentication' for details on the configuration options.
  * All related configuration properties have 'hadoop.http.authentication.'
@@ -47,7 +47,7 @@ public class AuthenticationFilterInitializer extends 
FilterInitializer {
 
   /**
    * Initializes hadoop-auth AuthenticationFilter.
-   * <p/>
+   * <p>
    * Propagates to hadoop-auth AuthenticationFilter configuration all Hadoop
    * configuration properties prefixed with "hadoop.http.authentication."
    *

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
index 55b2786..d7f0e81 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
@@ -46,7 +46,7 @@ public class HadoopKerberosName extends KerberosName {
   }
   /**
    * Set the static configuration to get the rules.
-   * <p/>
+   * <p>
    * IMPORTANT: This method does a NOP if the rules have been set already.
    * If there is a need to reset the rules, the {@link 
KerberosName#setRules(String)}
    * method should be invoked directly.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/IdMappingServiceProvider.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/IdMappingServiceProvider.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/IdMappingServiceProvider.java
index 4a1185e..86edab7 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/IdMappingServiceProvider.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/IdMappingServiceProvider.java
@@ -25,8 +25,9 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 
 /**
- * An interface for the implementation of <userId, userName> mapping
- * and <groupId, groupName> mapping
+ * An interface for the implementation of {@literal <}userId,
+ * userName{@literal >} mapping and {@literal <}groupId, groupName{@literal >}
+ * mapping.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
index 11714b1..d236ab0 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
@@ -343,13 +343,9 @@ public class SaslRpcClient {
   }
 
   /**
-   * Do client side SASL authentication with server via the given InputStream
-   * and OutputStream
-   * 
-   * @param inS
-   *          InputStream to use
-   * @param outS
-   *          OutputStream to use
+   * Do client side SASL authentication with server via the given IpcStreams.
+   *
+   * @param ipcStreams
    * @return AuthMethod used to negotiate the connection
    * @throws IOException
    */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
index 2313119..aa12b93 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
@@ -332,7 +332,8 @@ public final class SecurityUtil {
    }
   
   /**
-   * Get the host name from the principal name of format <service>/host@realm.
+   * Get the host name from the principal name of format {@literal <}service
+   * {@literal >}/host@realm.
    * @param principalName principal name of format as described above
    * @return host name if the the string conforms to the above format, else 
null
    */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
index 6ce72edb..db88106 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
@@ -676,7 +676,7 @@ public class UserGroupInformation {
 
   /**
    * remove the login method that is followed by a space from the username
-   * e.g. "jack (auth:SIMPLE)" -> "jack"
+   * e.g. "jack (auth:SIMPLE)" {@literal ->} "jack"
    *
    * @param userName
    * @return userName without login method

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java
index 52f39ef..5028482 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java
@@ -33,9 +33,9 @@ import java.net.URI;
 /**
  * CredentialProvider based on Java's KeyStore file format. The file may be
  * stored in any Hadoop FileSystem using the following name mangling:
- * jceks://[email protected]/my/creds.jceks ->
- * hdfs://nn1.example.com/my/creds.jceks jceks://file/home/larry/creds.jceks ->
- * file:///home/larry/creds.jceks
+ * jceks://[email protected]/my/creds.jceks {@literal ->}
+ * hdfs://nn1.example.com/my/creds.jceks jceks://file/home/larry/creds.jceks
+ * {@literal ->} file:///home/larry/creds.jceks
  */
 @InterfaceAudience.Private
 public class JavaKeyStoreProvider extends AbstractJavaKeyStoreProvider {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java
index 9ea9a57..ce0eb7d 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java
@@ -44,7 +44,8 @@ import java.util.EnumSet;
 /**
  * CredentialProvider based on Java's KeyStore file format. The file may be
  * stored only on the local filesystem using the following name mangling:
- * localjceks://file/home/larry/creds.jceks -> file:///home/larry/creds.jceks
+ * localjceks://file/home/larry/creds.jceks {@literal ->}
+ * file:///home/larry/creds.jceks
  */
 @InterfaceAudience.Private
 public final class LocalJavaKeyStoreProvider extends

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
index 60d82cb..97a7f08 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
@@ -108,12 +108,12 @@ public class ProxyUsers {
    * @param remoteAddress
    * @param conf
    * @throws AuthorizationException
-   * @deprecated use {@link #authorize(UserGroupInformation, String) instead. 
+   * @deprecated use {@link #authorize(UserGroupInformation, String)} instead.
    */
   @Deprecated
   public static void authorize(UserGroupInformation user, 
       String remoteAddress, Configuration conf) throws AuthorizationException {
-    authorize(user,remoteAddress);
+    authorize(user, remoteAddress);
   }
   
   @VisibleForTesting 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
index b0df8f0..e5bdab3 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
@@ -38,7 +38,7 @@ import java.text.MessageFormat;
 /**
  * {@link KeyStoresFactory} implementation that reads the certificates from
  * keystore files.
- * <p/>
+ * <p>
  * if the trust certificates keystore file changes, the {@link TrustManager}
  * is refreshed with the new trust certificate entries (using a
  * {@link ReloadingX509TrustManager} trustmanager).
@@ -87,7 +87,7 @@ public class FileBasedKeyStoresFactory implements 
KeyStoresFactory {
 
   /**
    * Resolves a property name to its client/server version if applicable.
-   * <p/>
+   * <p>
    * NOTE: This method is public for testing purposes.
    *
    * @param mode client/server mode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
index f05274a..10c1d7d 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
@@ -44,10 +44,10 @@ import java.util.List;
 /**
  * Factory that creates SSLEngine and SSLSocketFactory instances using
  * Hadoop configuration information.
- * <p/>
+ * <p>
  * This SSLFactory uses a {@link ReloadingX509TrustManager} instance,
  * which reloads public keys if the truststore file changes.
- * <p/>
+ * <p>
  * This factory is used to configure HTTPS in Hadoop HTTP based endpoints, both
  * client and server.
  */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
index 47546b3..f9236b6 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
@@ -58,15 +58,15 @@ import org.slf4j.LoggerFactory;
  * or X509Certificate, or ultimately (they all end up calling this one),
  * String.  (It's easier to supply JUnit with Strings instead of mock
  * SSLSession objects!)
- * </p><p>Our check() methods throw exceptions if the name is
+ * <p>Our check() methods throw exceptions if the name is
  * invalid, whereas javax.net.ssl.HostnameVerifier just returns true/false.
- * <p/>
+ * <p>
  * We provide the HostnameVerifier.DEFAULT, HostnameVerifier.STRICT, and
  * HostnameVerifier.ALLOW_ALL implementations.  We also provide the more
  * specialized HostnameVerifier.DEFAULT_AND_LOCALHOST, as well as
  * HostnameVerifier.STRICT_IE6.  But feel free to define your own
  * implementations!
- * <p/>
+ * <p>
  * Inspired by Sebastian Hauer's original StrictSSLProtocolSocketFactory in the
  * HttpClient "contrib" repository.
  */
@@ -109,10 +109,10 @@ public interface SSLHostnameVerifier extends 
javax.net.ssl.HostnameVerifier {
 
     /**
      * The DEFAULT HostnameVerifier works the same way as Curl and Firefox.
-     * <p/>
+     * <p>
      * The hostname must match either the first CN, or any of the subject-alts.
      * A wildcard can occur in the CN, and in any of the subject-alts.
-     * <p/>
+     * <p>
      * The only difference between DEFAULT and STRICT is that a wildcard (such
      * as "*.foo.com") with DEFAULT matches all subdomains, including
      * "a.b.foo.com".
@@ -158,13 +158,13 @@ public interface SSLHostnameVerifier extends 
javax.net.ssl.HostnameVerifier {
      * Java 1.4, Sun Java 5, Sun Java 6.  It's also pretty close to IE6.
      * This implementation appears to be compliant with RFC 2818 for dealing
      * with wildcards.
-     * <p/>
+     * <p>
      * The hostname must match either the first CN, or any of the subject-alts.
      * A wildcard can occur in the CN, and in any of the subject-alts.  The
      * one divergence from IE6 is how we only check the first CN.  IE6 allows
      * a match against any of the CNs present.  We decided to follow in
      * Sun Java 1.4's footsteps and only check the first CN.
-     * <p/>
+     * <p>
      * A wildcard such as "*.foo.com" matches only subdomains in the same
      * level, for example "a.foo.com".  It does not match deeper subdomains
      * such as "a.b.foo.com".
@@ -229,7 +229,7 @@ public interface SSLHostnameVerifier extends 
javax.net.ssl.HostnameVerifier {
          * This contains a list of 2nd-level domains that aren't allowed to
          * have wildcards when combined with country-codes.
          * For example: [*.co.uk].
-         * <p/>
+         * <p>
          * The [*.co.uk] problem is an interesting one.  Should we just hope
          * that CA's would never foolishly allow such a certificate to happen?
          * Looks like we're the only implementation guarding against this.
@@ -564,11 +564,11 @@ public interface SSLHostnameVerifier extends 
javax.net.ssl.HostnameVerifier {
       /**
        * Extracts the array of SubjectAlt DNS names from an X509Certificate.
        * Returns null if there aren't any.
-       * <p/>
+       * <p>
        * Note:  Java doesn't appear able to extract international characters
        * from the SubjectAlts.  It can only extract international characters
        * from the CN field.
-       * <p/>
+       * <p>
        * (Or maybe the version of OpenSSL I'm using to test isn't storing the
        * international characters correctly in the SubjectAlts?).
        *

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java
index 0b1fdf8..35589a2 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java
@@ -44,19 +44,19 @@ import java.util.Map;
  * The <code>DelegationTokenAuthenticatedURL</code> is a
  * {@link AuthenticatedURL} sub-class with built-in Hadoop Delegation Token
  * functionality.
- * <p/>
+ * <p>
  * The authentication mechanisms supported by default are Hadoop Simple
  * authentication (also known as pseudo authentication) and Kerberos SPNEGO
  * authentication.
- * <p/>
+ * <p>
  * Additional authentication mechanisms can be supported via {@link
  * DelegationTokenAuthenticator} implementations.
- * <p/>
+ * <p>
  * The default {@link DelegationTokenAuthenticator} is the {@link
  * KerberosDelegationTokenAuthenticator} class which supports
  * automatic fallback from Kerberos SPNEGO to Hadoop Simple authentication via
  * the {@link PseudoDelegationTokenAuthenticator} class.
- * <p/>
+ * <p>
  * <code>AuthenticatedURL</code> instances are not thread-safe.
  */
 @InterfaceAudience.Public
@@ -115,7 +115,7 @@ public class DelegationTokenAuthenticatedURL extends 
AuthenticatedURL {
    * Returns the default {@link DelegationTokenAuthenticator} class to use when
    * an {@link DelegationTokenAuthenticatedURL} instance is created without
    * specifying one.
-   * <p/>
+   * <p>
    * The default class is {@link KerberosDelegationTokenAuthenticator}
    *
    * @return the delegation token authenticator class to use as default.
@@ -143,7 +143,7 @@ public class DelegationTokenAuthenticatedURL extends 
AuthenticatedURL {
 
   /**
    * Creates an <code>DelegationTokenAuthenticatedURL</code>.
-   * <p/>
+   * <p>
    * An instance of the default {@link DelegationTokenAuthenticator} will be
    * used.
    */
@@ -191,7 +191,7 @@ public class DelegationTokenAuthenticatedURL extends 
AuthenticatedURL {
    * Sets if delegation token should be transmitted in the URL query string.
    * By default it is transmitted using the
    * {@link DelegationTokenAuthenticator#DELEGATION_TOKEN_HEADER} HTTP header.
-   * <p/>
+   * <p>
    * This method is provided to enable WebHDFS backwards compatibility.
    *
    * @param useQueryString  <code>TRUE</code> if the token is transmitted in 
the

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
index f5e798e..5275526 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
@@ -60,7 +60,7 @@ import java.util.Properties;
 /**
  *  The <code>DelegationTokenAuthenticationFilter</code> filter is a
  *  {@link AuthenticationFilter} with Hadoop Delegation Token support.
- *  <p/>
+ *  <p>
  *  By default it uses it own instance of the {@link
  *  AbstractDelegationTokenSecretManager}. For situations where an external
  *  <code>AbstractDelegationTokenSecretManager</code> is required (i.e. one 
that
@@ -86,7 +86,7 @@ public class DelegationTokenAuthenticationFilter
   /**
    * Sets an external <code>DelegationTokenSecretManager</code> instance to
    * manage creation and verification of Delegation Tokens.
-   * <p/>
+   * <p>
    * This is useful for use cases where secrets must be shared across multiple
    * services.
    */
@@ -148,7 +148,7 @@ public class DelegationTokenAuthenticationFilter
   /**
    * Returns the proxyuser configuration. All returned properties must start
    * with <code>proxyuser.</code>'
-   * <p/>
+   * <p>
    * Subclasses may override this method if the proxyuser configuration is 
    * read from other place than the filter init parameters.
    *

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
index 6ee59f1..284044f 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
@@ -59,7 +59,7 @@ import com.google.common.annotations.VisibleForTesting;
 /**
  * An {@link AuthenticationHandler} that implements Kerberos SPNEGO mechanism
  * for HTTP and supports Delegation Token functionality.
- * <p/>
+ * <p>
  * In addition to the wrapped {@link AuthenticationHandler} configuration
  * properties, this handler supports the following properties prefixed
  * with the type of the wrapped <code>AuthenticationHandler</code>:
@@ -135,7 +135,7 @@ public abstract class DelegationTokenAuthenticationHandler
   /**
    * Sets an external <code>DelegationTokenSecretManager</code> instance to
    * manage creation and verification of Delegation Tokens.
-   * <p/>
+   * <p>
    * This is useful for use cases where secrets must be shared across multiple
    * services.
    *

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenManager.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenManager.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenManager.java
index fd19b67..e1445fb 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenManager.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenManager.java
@@ -127,7 +127,7 @@ public class DelegationTokenManager {
   /**
    * Sets an external <code>DelegationTokenSecretManager</code> instance to
    * manage creation and verification of Delegation Tokens.
-   * <p/>
+   * <p>
    * This is useful for use cases where secrets must be shared across multiple
    * services.
    *

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/KerberosDelegationTokenAuthenticationHandler.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/KerberosDelegationTokenAuthenticationHandler.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/KerberosDelegationTokenAuthenticationHandler.java
index 395d2f2..28509e1 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/KerberosDelegationTokenAuthenticationHandler.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/KerberosDelegationTokenAuthenticationHandler.java
@@ -25,7 +25,7 @@ import 
org.apache.hadoop.security.authentication.server.KerberosAuthenticationHa
 /**
  * An {@link AuthenticationHandler} that implements Kerberos SPNEGO mechanism
  * for HTTP and supports Delegation Token functionality.
- * <p/>
+ * <p>
  * In addition to the {@link KerberosAuthenticationHandler} configuration
  * properties, this handler supports:
  * <ul>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/KerberosDelegationTokenAuthenticator.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/KerberosDelegationTokenAuthenticator.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/KerberosDelegationTokenAuthenticator.java
index 7e0e266..8b8a4bd 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/KerberosDelegationTokenAuthenticator.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/KerberosDelegationTokenAuthenticator.java
@@ -26,7 +26,7 @@ import 
org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
  * The <code>KerberosDelegationTokenAuthenticator</code> provides support for
  * Kerberos SPNEGO authentication mechanism and support for Hadoop Delegation
  * Token operations.
- * <p/>
+ * <p>
  * It falls back to the {@link PseudoDelegationTokenAuthenticator} if the HTTP
  * endpoint does not trigger a SPNEGO authentication
  */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/MultiSchemeDelegationTokenAuthenticationHandler.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/MultiSchemeDelegationTokenAuthenticationHandler.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/MultiSchemeDelegationTokenAuthenticationHandler.java
index fc32a19..0661fb2 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/MultiSchemeDelegationTokenAuthenticationHandler.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/MultiSchemeDelegationTokenAuthenticationHandler.java
@@ -29,6 +29,7 @@ import javax.servlet.http.HttpServletResponse;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import 
org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.authentication.server.AuthenticationHandler;
 import 
org.apache.hadoop.security.authentication.server.AuthenticationHandlerUtil;
 import org.apache.hadoop.security.authentication.server.AuthenticationToken;
 import 
org.apache.hadoop.security.authentication.server.CompositeAuthenticationHandler;
@@ -52,7 +53,7 @@ import com.google.common.base.Splitter;
  * required to ensure that only schemes with strongest level of security should
  * be used for delegation token management.
  *
- * <p/>
+ * <p>
  * In addition to the wrapped {@link AuthenticationHandler} configuration
  * properties, this handler supports the following properties prefixed with the
  * type of the wrapped <code>AuthenticationHandler</code>:

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/PseudoDelegationTokenAuthenticationHandler.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/PseudoDelegationTokenAuthenticationHandler.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/PseudoDelegationTokenAuthenticationHandler.java
index 6846fdb..9a4527a 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/PseudoDelegationTokenAuthenticationHandler.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/PseudoDelegationTokenAuthenticationHandler.java
@@ -26,7 +26,7 @@ import 
org.apache.hadoop.security.authentication.server.PseudoAuthenticationHand
 /**
  * An {@link AuthenticationHandler} that implements Kerberos SPNEGO mechanism
  * for HTTP and supports Delegation Token functionality.
- * <p/>
+ * <p>
  * In addition to the {@link KerberosAuthenticationHandler} configuration
  * properties, this handler supports:
  * <ul>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/PseudoDelegationTokenAuthenticator.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/PseudoDelegationTokenAuthenticator.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/PseudoDelegationTokenAuthenticator.java
index 8713aa4..3478f39 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/PseudoDelegationTokenAuthenticator.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/PseudoDelegationTokenAuthenticator.java
@@ -29,7 +29,7 @@ import java.io.IOException;
  * Hadoop's pseudo authentication mechanism that accepts
  * the user name specified as a query string parameter and support for Hadoop
  * Delegation Token operations.
- * <p/>
+ * <p>
  * This mimics the model of Hadoop Simple authentication trusting the
  * {@link UserGroupInformation#getCurrentUser()} value.
  */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java
index d064ef9..726a83d 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java
@@ -42,9 +42,9 @@ public final class ServiceOperations {
 
   /**
    * Stop a service.
-   * <p/>Do nothing if the service is null or not
+   * <p>Do nothing if the service is null or not
    * in a state in which it can be/needs to be stopped.
-   * <p/>
+   * <p>
    * The service state is checked <i>before</i> the operation begins.
    * This process is <i>not</i> thread safe.
    * @param service a service or null

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java
index 6b0b4e8..da91a3d 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java
@@ -268,7 +268,7 @@ public class ServiceLauncher<S extends Service>
    * <ol>
    * <li>Parse the command line.</li> 
    * <li>Build the service configuration from it.</li>
-   * <li>Start the service.</li>.
+   * <li>Start the service.</li>
    * <li>If it is a {@link LaunchableService}: execute it</li>
    * <li>Otherwise: wait for it to finish.</li>
    * <li>Exit passing the status code to the {@link #exit(int, String)}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/package-info.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/package-info.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/package-info.java
index 8516357..f582fa2 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/package-info.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/package-info.java
@@ -379,7 +379,7 @@
  a new configuration is created:
 
  <pre>
- public Configuration bindArgs(Configuration config, List<String> args)
+ public Configuration bindArgs(Configuration config, List&lt;String&gt; args)
     throws Exception {
    Configuration newConf = new YarnConfiguration(config);
    return newConf;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ClassUtil.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ClassUtil.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ClassUtil.java
index 6f94989..44c9466 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ClassUtil.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ClassUtil.java
@@ -34,7 +34,6 @@ public class ClassUtil {
    * 
    * @param clazz the class to find.
    * @return a jar file that contains the class, or null.
-   * @throws IOException
    */
   public static String findContainingJar(Class<?> clazz) {
     ClassLoader loader = clazz.getClassLoader();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
index 1f34291..bcb17d0 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
@@ -64,7 +64,7 @@ import java.util.Stack;
  *     Unknown qualifiers are considered after known qualifiers, with lexical 
order (always case insensitive),
  *   </li>
  * <li>a dash usually precedes a qualifier, and is always less important than 
something preceded with a dot.</li>
- * </ul></p>
+ * </ul><p>
  *
  * @see <a 
href="https://cwiki.apache.org/confluence/display/MAVENOLD/Versioning">"Versioning"
 on Maven Wiki</a>
  */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FindClass.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FindClass.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FindClass.java
index b7feb22..690d097 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FindClass.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FindClass.java
@@ -33,9 +33,9 @@ import java.security.CodeSource;
  * This entry point exists for diagnosing classloader problems:
  * is a class or resource present -and if so, where?
  *
- * <p/>
+ * <p>
  * Actions
- * <p/>
+ * <br>
  * <ul>
  *   <li><pre>load</pre>: load a class but do not attempt to create it </li>
  *   <li><pre>create</pre>: load and create a class, print its string 
value</li>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
index a8a513d..8ca7a90 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
@@ -15,7 +15,8 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.util;import java.io.File;
+package org.apache.hadoop.util;
+import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.PrintStream;
@@ -54,9 +55,9 @@ import org.slf4j.LoggerFactory;
  * line arguments, enabling applications to easily specify a namenode, a 
  * ResourceManager, additional configuration resources etc.
  * 
- * <h4 id="GenericOptions">Generic Options</h4>
+ * <h3 id="GenericOptions">Generic Options</h3>
  * 
- * <p>The supported generic options are:</p>
+ * <p>The supported generic options are:
  * <p><blockquote><pre>
  *     -conf &lt;configuration file&gt;     specify a configuration file
  *     -D &lt;property=value&gt;            use value for given property
@@ -69,12 +70,12 @@ import org.slf4j.LoggerFactory;
  *     -archives &lt;comma separated list of archives&gt;    specify comma
  *             separated archives to be unarchived on the compute machines.
 
- * </pre></blockquote></p>
+ * </pre></blockquote><p>
  * 
  * <p>The general command line syntax is:</p>
- * <p><tt><pre>
+ * <p><pre><code>
  * bin/hadoop command [genericOptions] [commandOptions]
- * </pre></tt></p>
+ * </code></pre><p>
  * 
  * <p>Generic command line arguments <strong>might</strong> modify 
  * <code>Configuration </code> objects, given to constructors.</p>
@@ -104,7 +105,7 @@ import org.slf4j.LoggerFactory;
  * $ bin/hadoop jar -libjars testlib.jar 
  * -archives test.tgz -files file.txt inputjar args
  * job submission with libjars, files and archives
- * </pre></blockquote></p>
+ * </pre></blockquote><p>
  *
  * @see Tool
  * @see ToolRunner
@@ -141,8 +142,8 @@ public class GenericOptionsParser {
   }
   
   /** 
-   * Create a <code>GenericOptionsParser<code> to parse only the generic 
Hadoop  
-   * arguments. 
+   * Create a <code>GenericOptionsParser</code> to parse only the generic
+   * Hadoop arguments.
    * 
    * The array of string arguments other than the generic arguments can be 
    * obtained by {@link #getRemainingArgs()}.
@@ -217,7 +218,7 @@ public class GenericOptionsParser {
 
   /**
    * Specify properties of each generic option.
-   * <i>Important</i?: as {@link OptionBuilder} is not thread safe, subclasses
+   * <i>Important</i>: as {@link OptionBuilder} is not thread safe, subclasses
    * must synchronize use on {@code OptionBuilder.class}
    */
   @SuppressWarnings("static-access")

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
index 50be1c3..366c8c7 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
@@ -34,10 +34,10 @@ import java.util.Map;
 /**
  * HTTP utility class to help propagate server side exception to the client
  * over HTTP as a JSON payload.
- * <p/>
+ * <p>
  * It creates HTTP Servlet and JAX-RPC error responses including details of the
  * exception that allows a client to recreate the remote exception.
- * <p/>
+ * <p>
  * It parses HTTP client connections and recreates the exception.
  */
 @InterfaceAudience.Private
@@ -125,7 +125,7 @@ public class HttpExceptionUtils {
    * expected HTTP status code. If the current status code is not the expected
    * one it throws an exception with a detail message using Server side error
    * messages if available.
-   * <p/>
+   * <p>
    * <b>NOTE:</b> this method will throw the deserialized exception even if not
    * declared in the <code>throws</code> of the method signature.
    *

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java
index cbc8560..7e09a61 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java
@@ -185,7 +185,7 @@ public class JsonSerialization<T> {
    * Save to a local file. Any existing file is overwritten unless
    * the OS blocks that.
    * @param file file
-   * @param path path
+   * @param instance instance
    * @throws IOException IO exception
    */
   public void save(File file, T instance) throws

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightCache.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightCache.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightCache.java
index d79aade..79de1ac 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightCache.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightCache.java
@@ -99,11 +99,11 @@ public class LightWeightCache<K, E extends K> extends 
LightWeightGSet<K, E> {
   /**
    * @param recommendedLength Recommended size of the internal array.
    * @param sizeLimit the limit of the size of the cache.
-   *            The limit is disabled if it is <= 0.
-   * @param creationExpirationPeriod the time period C > 0 in nanoseconds that
-   *            the creation of an entry is expired if it is added to the cache
-   *            longer than C.
-   * @param accessExpirationPeriod the time period A >= 0 in nanoseconds that
+   *            The limit is disabled if it is &lt;= 0.
+   * @param creationExpirationPeriod the time period C &gt; 0 in nanoseconds
+   *            that the creation of an entry is expired if it is added to the
+   *            cache longer than C.
+   * @param accessExpirationPeriod the time period A &gt;= 0 in nanoseconds 
that
    *            the access of an entry is expired if it is not accessed
    *            longer than A. 
    */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java
index a1cf709..e2cd304 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java
@@ -62,7 +62,6 @@ public class LineReader implements Closeable {
    * Create a line reader that reads from the given stream using the
    * default buffer-size (64k).
    * @param in The input stream
-   * @throws IOException
    */
   public LineReader(InputStream in) {
     this(in, DEFAULT_BUFFER_SIZE);
@@ -73,7 +72,6 @@ public class LineReader implements Closeable {
    * given buffer-size.
    * @param in The input stream
    * @param bufferSize Size of the read buffer
-   * @throws IOException
    */
   public LineReader(InputStream in, int bufferSize) {
     this.in = in;
@@ -115,7 +113,6 @@ public class LineReader implements Closeable {
    * @param in The input stream
    * @param bufferSize Size of the read buffer
    * @param recordDelimiterBytes The delimiter
-   * @throws IOException
    */
   public LineReader(InputStream in, int bufferSize,
       byte[] recordDelimiterBytes) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java
index b01330f..20931de 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java
@@ -187,7 +187,7 @@ public class MachineList {
   }
 
   /**
-   * returns the contents of the MachineList as a Collection<String>
+   * returns the contents of the MachineList as a Collection&lt;String&gt;
    * This can be used for testing 
    * @return contents of the MachineList
    */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
index 2ca8e55..556f4e0 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
@@ -46,7 +46,7 @@ import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.SERVICE_SHUTDOW
 /**
  * The <code>ShutdownHookManager</code> enables running shutdownHook
  * in a deterministic order, higher priority first.
- * <p/>
+ * <p>
  * The JVM runs ShutdownHooks in a non-deterministic order or in parallel.
  * This class registers a single JVM shutdownHook and run all the
  * shutdownHooks registered to it (to this class) in order based on their

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java
index 5405d77..50a728e 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java
@@ -39,7 +39,6 @@ public class ShutdownThreadsHelper {
    * @param thread {@link Thread to be shutdown}
    * @return <tt>true</tt> if the thread is successfully interrupted,
    * <tt>false</tt> otherwise
-   * @throws InterruptedException
    */
   public static boolean shutdownThread(Thread thread) {
     return shutdownThread(thread, SHUTDOWN_WAIT_MS);
@@ -51,7 +50,6 @@ public class ShutdownThreadsHelper {
    *                              interrupted
    * @return <tt>true</tt> if the thread is successfully interrupted,
    * <tt>false</tt> otherwise
-   * @throws InterruptedException
    */
   public static boolean shutdownThread(Thread thread,
                                     long timeoutInMilliSeconds) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
index f49698c..d19ced9 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
@@ -428,7 +428,7 @@ public class StringUtils {
    * Splits a comma separated value <code>String</code>, trimming leading and
    * trailing whitespace on each value. Duplicate and empty values are removed.
    *
-   * @param str a comma separated <String> with values, may be null
+   * @param str a comma separated <code>String</code> with values, may be null
    * @return a <code>Collection</code> of <code>String</code> values, empty
    *         Collection if null String input
    */
@@ -1006,8 +1006,8 @@ public class StringUtils {
    * @param template String template to receive replacements
    * @param pattern Pattern to match for identifying tokens, must use a 
capturing
    *   group
-   * @param replacements Map<String, String> mapping tokens identified by the
-   *   capturing group to their replacement values
+   * @param replacements Map&lt;String, String&gt; mapping tokens identified by
+   * the capturing group to their replacement values
    * @return String template with replacements
    */
   public static String replaceTokens(String template, Pattern pattern,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java
index b6e3d7d..a4fbce4 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java
@@ -69,7 +69,7 @@ import org.apache.hadoop.conf.Configurable;
  *         System.exit(res);
  *       }
  *     }
- * </pre></blockquote></p>
+ * </pre></blockquote><p>
  * 
  * @see GenericOptionsParser
  * @see ToolRunner

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java
index d23df79..fe64adb 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java
@@ -90,7 +90,7 @@ public class ZKUtil {
    * 
<code>sasl:hdfs/host1@MY.DOMAIN:cdrwa,sasl:hdfs/host2@MY.DOMAIN:cdrwa</code>
    *
    * @return ACL list
-   * @throws {@link BadAclFormatException} if an ACL is invalid
+   * @throws BadAclFormatException if an ACL is invalid
    */
   public static List<ACL> parseACLs(String aclString) throws
       BadAclFormatException {
@@ -128,7 +128,7 @@ public class ZKUtil {
    * 
    * @param authString the comma-separated auth mechanisms
    * @return a list of parsed authentications
-   * @throws {@link BadAuthFormatException} if the auth format is invalid
+   * @throws BadAuthFormatException if the auth format is invalid
    */
   public static List<ZKAuthInfo> parseAuth(String authString) throws
       BadAuthFormatException{

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/DynamicBloomFilter.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/DynamicBloomFilter.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/DynamicBloomFilter.java
index be9a407..8a7ec69 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/DynamicBloomFilter.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/DynamicBloomFilter.java
@@ -64,8 +64,9 @@ import org.apache.hadoop.classification.InterfaceStability;
  * process of a DBF is iterative. At the start, the DBF is a <code>1 * m</code>
  * bit matrix, i.e., it is composed of a single standard Bloom filter.
  * It assumes that <code>n<sub>r</sub></code> elements are recorded in the 
- * initial bit vector, where <code>n<sub>r</sub> <= n</code> (<code>n</code> is
- * the cardinality of the set <code>A</code> to record in the filter).  
+ * initial bit vector, where <code>n<sub>r</sub> {@literal <=} n</code>
+ * (<code>n</code> is the cardinality of the set <code>A</code> to record in
+ * the filter).
  * <p>
  * As the size of <code>A</code> grows during the execution of the application,
  * several keys must be inserted in the DBF.  When inserting a key into the 
DBF,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java
index f124890..9304b48 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java
@@ -37,7 +37,7 @@ public interface AsyncGet<R, E extends Throwable> {
    *
    * @param timeout The maximum time period to wait.
    *                When timeout == 0, it does not wait at all.
-   *                When timeout < 0, it waits indefinitely.
+   *                When timeout &lt; 0, it waits indefinitely.
    * @param unit The unit of the timeout value
    * @return the result, which is possibly null.
    * @throws E an exception thrown by the underlying implementation.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java
index f3895d0..8c3b9da 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java
@@ -69,11 +69,11 @@ public class JenkinsHash extends Hash {
    * <p>The best hash table sizes are powers of 2.  There is no need to do mod
    * a prime (mod is sooo slow!).  If you need less than 32 bits, use a 
bitmask.
    * For example, if you need only 10 bits, do
-   * <code>h = (h & hashmask(10));</code>
+   * <code>h = (h &amp; hashmask(10));</code>
    * In which case, the hash table should have hashsize(10) elements.
    * 
    * <p>If you are hashing n strings byte[][] k, do it like this:
-   * for (int i = 0, h = 0; i < n; ++i) h = hash( k[i], h);
+   * for (int i = 0, h = 0; i &lt; n; ++i) h = hash( k[i], h);
    * 
    * <p>By Bob Jenkins, 2006.  [email protected].  You may use this
    * code any way you wish, private, educational, or commercial.  It's free.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java
index 8823f5c..a7a8fa5 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p/>
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -20,7 +20,7 @@ package org.apache.hadoop.cli.util;
 import org.apache.hadoop.conf.Configuration;
 
 /**
- * This interface is to generalize types of test command for upstream projects
+ * This interface is to generalize types of test command for upstream projects.
  */
 public interface CLICommand {
   public CommandExecutor getExecutor(String tag, Configuration conf)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommandFS.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommandFS.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommandFS.java
index eb96a06..e22c200 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommandFS.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommandFS.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p/>
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommandTypes.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommandTypes.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommandTypes.java
index 44e0c30..8efe70c 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommandTypes.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommandTypes.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p/>
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,7 +18,7 @@
 package org.apache.hadoop.cli.util;
 
 /**
- * This interface is to provide command type for test commands enums
+ * This interface is to provide command type for test commands enums.
  */
 public interface CLICommandTypes {
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java
index df8c54b..362bde9 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p/>
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestDefaultRetryPolicy.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestDefaultRetryPolicy.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestDefaultRetryPolicy.java
index 56dec3a..6b82077 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestDefaultRetryPolicy.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestDefaultRetryPolicy.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p/>
+ * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java
index 493d860..eb51820 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java
@@ -44,9 +44,9 @@ public class StaticMapping extends AbstractDNSToSwitchMapping 
 {
   /**
    * Key to define the node mapping as a comma-delimited list of host=rack
    * mappings, e.g. <code>host1=r1,host2=r1,host3=r2</code>.
-   * <p/>
+   * <p>
    * Value: {@value}
-   * <p/>
+   * <p>
    * <b>Important: </b>spaces not trimmed and are considered significant.
    */
   public static final String KEY_HADOOP_CONFIGURED_NODE_MAPPING =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b57f2f7/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/SetSpanReceiver.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/SetSpanReceiver.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/SetSpanReceiver.java
index 2bc68ce..d4599b0 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/SetSpanReceiver.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/SetSpanReceiver.java
@@ -35,7 +35,7 @@ import org.junit.Assert;
 /**
  * Span receiver that puts all spans into a single set.
  * This is useful for testing.
- * <p/>
+ * <p>
  * We're not using HTrace's POJOReceiver here so as that doesn't
  * push all the metrics to a static place, and would make testing
  * SpanReceiverHost harder.


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to