http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
index 6fdc77e..5008354 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
@@ -18,14 +18,6 @@
  */
 package org.apache.hadoop.hbase;
 
-import com.google.protobuf.Service;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.coprocessor.*;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.IOException;
 import java.lang.management.ManagementFactory;
 import java.rmi.registry.LocateRegistry;
@@ -34,13 +26,20 @@ import java.rmi.server.RMIClientSocketFactory;
 import java.rmi.server.RMIServerSocketFactory;
 import java.rmi.server.UnicastRemoteObject;
 import java.util.HashMap;
-import java.util.Optional;
-
 import javax.management.MBeanServer;
 import javax.management.remote.JMXConnectorServer;
 import javax.management.remote.JMXConnectorServerFactory;
 import javax.management.remote.JMXServiceURL;
 import javax.management.remote.rmi.RMIConnectorServer;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Pluggable JMX Agent for HBase(to fix the 2 random TCP ports issue
@@ -49,6 +48,7 @@ import javax.management.remote.rmi.RMIConnectorServer;
  * 2)support password authentication
  * 3)support subset of SSL (with default configuration)
  */
[email protected]
 public class JMXListener implements MasterCoprocessor, RegionServerCoprocessor 
{
   private static final Logger LOG = LoggerFactory.getLogger(JMXListener.class);
   public static final String RMI_REGISTRY_PORT_CONF_KEY = ".rmi.registry.port";

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
index d505d6f..5dffb73 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
@@ -13,14 +13,15 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 import java.net.Socket;
 import java.util.ArrayList;
-
 import javax.net.ssl.SSLSocket;
 import javax.rmi.ssl.SslRMIClientSocketFactory;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Avoid SSL V3.0 "Poodle" Vulnerability - CVE-2014-3566
  */
 @SuppressWarnings("serial")
[email protected]
 public class SslRMIClientSocketFactorySecure extends SslRMIClientSocketFactory 
{
   @Override
   public Socket createSocket(String host, int port) throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
index 3583afe..8a92236 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
@@ -14,14 +14,15 @@ import java.io.IOException;
 import java.net.ServerSocket;
 import java.net.Socket;
 import java.util.ArrayList;
-
 import javax.net.ssl.SSLSocket;
 import javax.net.ssl.SSLSocketFactory;
 import javax.rmi.ssl.SslRMIServerSocketFactory;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Avoid SSL V3.0 "Poodle" Vulnerability - CVE-2014-3566
  */
[email protected]
 public class SslRMIServerSocketFactorySecure extends SslRMIServerSocketFactory 
{
   // If you add more constructors, you may have to change the rest of this 
implementation,
   // which assumes an empty constructor, i.e. there are no specially enabled 
protocols or

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java
index 3e911a8..f0b3a41 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java
@@ -25,13 +25,13 @@ import java.io.FileNotFoundException;
 import java.io.FileReader;
 import java.io.FileWriter;
 import java.io.IOException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
 import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
 import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -48,7 +48,8 @@ import org.slf4j.LoggerFactory;
  * file, and use it to delete it. for a master, as the znode path constant 
whatever the server, we
  * check its content to make sure that the backup server is not now in 
charge.</p>
  */
-public class ZNodeClearer {
[email protected]
+public final class ZNodeClearer {
   private static final Logger LOG = 
LoggerFactory.getLogger(ZNodeClearer.class);
 
   private ZNodeClearer() {}

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java
index 93c9690..a4daaf0 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.backup.example;
 import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
-
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -30,6 +30,7 @@ import org.slf4j.LoggerFactory;
  * <p>
  * It is internally synchronized to ensure consistent view of the table state.
  */
[email protected]
 public class HFileArchiveTableMonitor {
   private static final Logger LOG = 
LoggerFactory.getLogger(HFileArchiveTableMonitor.class);
   private final Set<String> archivedTables = new TreeSet<>();

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
index 9ea67c1..bc3d85e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
@@ -20,6 +20,7 @@
 
 package org.apache.hadoop.hbase.fs;
 
+import edu.umd.cs.findbugs.annotations.Nullable;
 import java.io.Closeable;
 import java.io.IOException;
 import java.lang.reflect.Field;
@@ -30,7 +31,6 @@ import java.lang.reflect.Modifier;
 import java.lang.reflect.Proxy;
 import java.lang.reflect.UndeclaredThrowableException;
 import java.net.URI;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -52,16 +52,16 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.util.Progressable;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import edu.umd.cs.findbugs.annotations.Nullable;
-
 /**
  * An encapsulation for the FileSystem object that hbase uses to access
  * data. This class allows the flexibility of using
  * separate filesystem objects for reading and writing hfiles and wals.
  */
[email protected]
 public class HFileSystem extends FilterFileSystem {
   public static final Logger LOG = LoggerFactory.getLogger(HFileSystem.class);
 
@@ -128,8 +128,8 @@ public class HFileSystem extends FilterFileSystem {
   }
 
   /**
-   * Returns the filesystem that is specially setup for 
-   * doing reads from storage. This object avoids doing 
+   * Returns the filesystem that is specially setup for
+   * doing reads from storage. This object avoids doing
    * checksum verifications for reads.
    * @return The FileSystem object that can be used to read data
    *         from files.
@@ -473,7 +473,7 @@ public class HFileSystem extends FilterFileSystem {
   }
 
   /**
-   * The org.apache.hadoop.fs.FilterFileSystem does not yet support 
+   * The org.apache.hadoop.fs.FilterFileSystem does not yet support
    * createNonRecursive. This is a hadoop bug and when it is fixed in Hadoop,
    * this definition will go away.
    */

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/io/MetricsIO.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/MetricsIO.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/MetricsIO.java
index 344819b..3634ccb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/MetricsIO.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/MetricsIO.java
@@ -20,8 +20,11 @@ package org.apache.hadoop.hbase.io;
 
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactory;
+import org.apache.yetus.audience.InterfaceAudience;
+
 import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 
[email protected]
 public class MetricsIO {
 
   private final MetricsIOSource source;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/io/MetricsIOWrapperImpl.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/MetricsIOWrapperImpl.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/MetricsIOWrapperImpl.java
index 82eb74c..1ce762a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/MetricsIOWrapperImpl.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/MetricsIOWrapperImpl.java
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.io;
 
 import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.yetus.audience.InterfaceAudience;
 
[email protected]
 public class MetricsIOWrapperImpl implements MetricsIOWrapper {
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java
index bb80abe..054d54b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java
@@ -19,11 +19,13 @@ package org.apache.hadoop.hbase.io.hfile;
 
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import org.apache.hadoop.hbase.metrics.impl.FastLongHistogram;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Snapshot of block cache age in cache.
  * This object is preferred because we can control how it is serialized out 
when JSON'ing.
  */
[email protected]
 @JsonIgnoreProperties({"ageHistogram", "snapshot"})
 public class AgeSnapshot {
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockPriority.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockPriority.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockPriority.java
index 9d4ac87..d2ac6f0 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockPriority.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockPriority.java
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
[email protected]
 public enum BlockPriority {
   /**
    * Accessed a single time (used for scan-resistance)

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java
index 4a5bb64..b8f8357 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java
@@ -18,11 +18,13 @@
 package org.apache.hadoop.hbase.io.hfile;
 
 import org.apache.hadoop.hbase.Cell;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * BlockWithScanInfo is wrapper class for HFileBlock with other attributes. 
These attributes are
  * supposed to be much cheaper to be maintained in each caller thread than in 
HFileBlock itself.
  */
[email protected]
 public class BlockWithScanInfo {
   private final HFileBlock hFileBlock;
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
index ce8d533..175fb83 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
@@ -28,15 +28,16 @@ import java.util.concurrent.ScheduledThreadPoolExecutor;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class PrefetchExecutor {
[email protected]
+public final class PrefetchExecutor {
 
   private static final Logger LOG = 
LoggerFactory.getLogger(PrefetchExecutor.class);
 
@@ -130,9 +131,10 @@ public class PrefetchExecutor {
   public static boolean isCompleted(Path path) {
     Future<?> future = prefetchFutures.get(path);
     if (future != null) {
-      return future.isDone(); 
+      return future.isDone();
     }
     return true;
   }
 
+  private PrefetchExecutor() {}
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/EmptyServiceNameException.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/EmptyServiceNameException.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/EmptyServiceNameException.java
index 2fab38a..9ca2927 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/EmptyServiceNameException.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/EmptyServiceNameException.java
@@ -17,5 +17,8 @@
  */
 package org.apache.hadoop.hbase.ipc;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
[email protected]
 @SuppressWarnings("serial")
 public class EmptyServiceNameException extends FatalConnectionException {}

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java
index d379e5f..ce0f86d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java
@@ -23,9 +23,9 @@ import java.util.concurrent.ArrayBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.DaemonThreadFactory;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -36,6 +36,7 @@ import 
org.apache.hbase.thirdparty.io.netty.util.internal.StringUtil;
  *
  * This can be used for HMaster, where no prioritization is needed.
  */
[email protected]
 public class FifoRpcScheduler extends RpcScheduler {
   private static final Logger LOG = 
LoggerFactory.getLogger(FifoRpcScheduler.class);
   private final int handlerCount;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapperImpl.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapperImpl.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapperImpl.java
index 4afcc33..8753ebb 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapperImpl.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapperImpl.java
@@ -19,6 +19,9 @@
 
 package org.apache.hadoop.hbase.ipc;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
[email protected]
 public class MetricsHBaseServerWrapperImpl implements 
MetricsHBaseServerWrapper {
 
   private RpcServer server;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/QosPriority.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/QosPriority.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/QosPriority.java
index 9e51d2c..ca1546c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/QosPriority.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/QosPriority.java
@@ -19,14 +19,15 @@ package org.apache.hadoop.hbase.ipc;
 
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
-
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Annotation which decorates RPC methods to denote the relative priority 
among other RPCs in the
  * same server. Provides a basic notion of quality of service (QOS).
  */
 @Retention(RetentionPolicy.RUNTIME)
[email protected]
 public @interface QosPriority {
   int priority() default HConstants.NORMAL_QOS;
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/UnknownServiceException.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/UnknownServiceException.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/UnknownServiceException.java
index 87e78c2..5b4a2c2 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/UnknownServiceException.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/UnknownServiceException.java
@@ -17,7 +17,10 @@
  */
 package org.apache.hadoop.hbase.ipc;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
 @SuppressWarnings("serial")
[email protected]
 public class UnknownServiceException extends FatalConnectionException {
   UnknownServiceException(final String msg) {
     super(msg);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
index f25f3bf..61eb28f 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
@@ -17,16 +17,19 @@
  */
 package org.apache.hadoop.hbase.master;
 
-import org.apache.hbase.thirdparty.com.google.protobuf.Message;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.regionserver.AnnotationReadingPriorityFunction;
+import org.apache.hadoop.hbase.regionserver.RSRpcServices;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.yetus.audience.InterfaceAudience;
+
+import org.apache.hbase.thirdparty.com.google.protobuf.Message;
+
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos;
-import org.apache.hadoop.hbase.regionserver.AnnotationReadingPriorityFunction;
-import org.apache.hadoop.hbase.regionserver.RSRpcServices;
-import org.apache.hadoop.hbase.security.User;
 
 /**
  * Priority function specifically for the master.
@@ -41,6 +44,7 @@ import org.apache.hadoop.hbase.security.User;
  * that all requests to transition meta are handled in different threads from 
other report region
  * in transition calls.
  */
[email protected]
 public class MasterAnnotationReadingPriorityFunction extends 
AnnotationReadingPriorityFunction {
   public MasterAnnotationReadingPriorityFunction(final RSRpcServices 
rpcServices) {
     this(rpcServices, rpcServices.getClass());

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsAssignmentManager.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsAssignmentManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsAssignmentManager.java
index 7d7dd81..d13ffe9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsAssignmentManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsAssignmentManager.java
@@ -18,11 +18,13 @@
 
 package org.apache.hadoop.hbase.master;
 
+import static 
org.apache.hadoop.hbase.master.MetricsMaster.convertToProcedureMetrics;
+
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
+import org.apache.yetus.audience.InterfaceAudience;
 
-import static 
org.apache.hadoop.hbase.master.MetricsMaster.convertToProcedureMetrics;
-
[email protected]
 public class MetricsAssignmentManager {
   private final MetricsAssignmentManagerSource assignmentManagerSource;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystem.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystem.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystem.java
index 45dbeb8..609ac19 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystem.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystem.java
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.master;
 
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
+import org.apache.yetus.audience.InterfaceAudience;
 
[email protected]
 public class MetricsMasterFileSystem {
 
   private final MetricsMasterFileSystemSource source;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsSnapshot.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsSnapshot.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsSnapshot.java
index 2d7c797..483552d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsSnapshot.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsSnapshot.java
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.master;
 
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
+import org.apache.yetus.audience.InterfaceAudience;
 
[email protected]
 public class MetricsSnapshot {
 
   private final MetricsSnapshotSource source;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
index 36f57f2..44dfe41 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.master.RackManager;
 import org.apache.hadoop.hbase.master.RegionPlan;
 import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -67,6 +68,7 @@ import 
org.apache.hbase.thirdparty.com.google.common.collect.Sets;
  * actual balancing algorithm.
  *
  */
[email protected]
 public abstract class BaseLoadBalancer implements LoadBalancer {
   protected static final int MIN_SERVER_BALANCE = 2;
   private volatile boolean stopped = false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterLoadState.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterLoadState.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterLoadState.java
index a783a07..5d1e1cc 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterLoadState.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterLoadState.java
@@ -21,13 +21,14 @@ import java.util.List;
 import java.util.Map;
 import java.util.NavigableMap;
 import java.util.TreeMap;
-
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Class used to hold the current state of the cluster and how balanced it is.
  */
[email protected]
 public class ClusterLoadState {
   private final Map<ServerName, List<RegionInfo>> clusterState;
   private final NavigableMap<ServerAndLoad, List<RegionInfo>> serversByLoad;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
index b652610..5a6659e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.master.LoadBalancer;
 import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.master.RegionPlan;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -66,6 +67,7 @@ import 
org.apache.hbase.thirdparty.com.google.common.collect.Sets;
  * {@link 
org.apache.hadoop.hbase.master.balancer.StochasticLoadBalancer.CandidateGenerator}
  *
  */
[email protected]
 public class FavoredStochasticBalancer extends StochasticLoadBalancer 
implements
     FavoredNodesPromoter {
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsBalancer.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsBalancer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsBalancer.java
index 3707536..c421269 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsBalancer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsBalancer.java
@@ -19,10 +19,12 @@
 package org.apache.hadoop.hbase.master.balancer;
 
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
 * Facade for exposing metrics about the balancer.
  */
[email protected]
 public class MetricsBalancer {
 
   private MetricsBalancerSource source = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancer.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancer.java
index 850a9f5..ee24ff3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancer.java
@@ -19,11 +19,13 @@
 package org.apache.hadoop.hbase.master.balancer;
 
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This metrics balancer uses extended source for stochastic load balancer
  * to report its related metrics to JMX. For details, refer to HBASE-13965
  */
[email protected]
 public class MetricsStochasticBalancer extends MetricsBalancer {
   /**
    * Use the stochastic source instead of the default source.

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/BaseFileCleanerDelegate.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/BaseFileCleanerDelegate.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/BaseFileCleanerDelegate.java
index ed0ec9f..b587200 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/BaseFileCleanerDelegate.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/BaseFileCleanerDelegate.java
@@ -17,17 +17,18 @@
  */
 package org.apache.hadoop.hbase.master.cleaner;
 
+import java.util.Map;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.hbase.BaseConfigurable;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
 
-import java.util.Map;
-
 /**
  * Base class for file cleaners which allows subclasses to implement a simple
  * isFileDeletable method (which used to be the FileCleanerDelegate contract).
  */
[email protected]
 public abstract class BaseFileCleanerDelegate extends BaseConfigurable
 implements FileCleanerDelegate {
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index fdf5141..46f6217 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -17,13 +17,6 @@
  */
 package org.apache.hadoop.hbase.master.cleaner;
 
-import org.apache.hadoop.hbase.conf.ConfigurationObserver;
-import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-import org.apache.hbase.thirdparty.com.google.common.base.Predicate;
-import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
-import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
-import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-
 import java.io.IOException;
 import java.util.Comparator;
 import java.util.HashMap;
@@ -34,24 +27,32 @@ import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ForkJoinPool;
 import java.util.concurrent.RecursiveTask;
 import java.util.concurrent.atomic.AtomicBoolean;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.ScheduledChore;
 import org.apache.hadoop.hbase.Stoppable;
+import org.apache.hadoop.hbase.conf.ConfigurationObserver;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+import org.apache.hbase.thirdparty.com.google.common.base.Predicate;
+import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
+import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+
 /**
  * Abstract Cleaner that uses a chain of delegates to clean a directory of 
files
  * @param <T> Cleaner delegate class that is dynamically loaded from 
configuration
  */
 
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD",
     justification="TODO: Fix. It is wonky have static pool initialized from 
instance")
[email protected]
 public abstract class CleanerChore<T extends FileCleanerDelegate> extends 
ScheduledChore
     implements ConfigurationObserver {
 
@@ -315,7 +316,7 @@ public abstract class CleanerChore<T extends 
FileCleanerDelegate> extends Schedu
       }
 
       Iterable<FileStatus> filteredFiles = 
cleaner.getDeletableFiles(deletableValidFiles);
-      
+
       // trace which cleaner is holding on to each file
       if (LOG.isTraceEnabled()) {
         ImmutableSet<FileStatus> filteredFileSet = 
ImmutableSet.copyOf(filteredFiles);
@@ -325,10 +326,10 @@ public abstract class CleanerChore<T extends 
FileCleanerDelegate> extends Schedu
           }
         }
       }
-      
+
       deletableValidFiles = filteredFiles;
     }
-    
+
     Iterable<FileStatus> filesToDelete = Iterables.concat(invalidFiles, 
deletableValidFiles);
     return deleteFiles(filesToDelete) == files.size();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
index 6c78914..358fd61 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.master.ServerListener;
 import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -55,6 +56,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RemoteProce
 /**
  * A remote procecdure dispatcher for regionservers.
  */
[email protected]
 public class RSProcedureDispatcher
     extends RemoteProcedureDispatcher<MasterProcedureEnv, ServerName>
     implements ServerListener {

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
index 2234a1b..7572495 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
 import java.util.Set;
-
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionInfo;
@@ -35,11 +34,13 @@ import 
org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
 import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
 import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
 import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RecoverMetaState;
@@ -50,6 +51,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.R
  * assigning meta region/s. Any place where meta is accessed and requires meta 
to be online, need to
  * submit this procedure instead of duplicating steps to recover meta in the 
code.
  */
[email protected]
 public class RecoverMetaProcedure
     extends StateMachineProcedure<MasterProcedureEnv, 
MasterProcedureProtos.RecoverMetaState>
     implements TableProcedureInterface {

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
index 3d66072..5d8d6fa 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
@@ -22,7 +22,6 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
-
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionInfo;
@@ -36,6 +35,7 @@ import 
org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
 import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
 import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -53,6 +53,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.S
  * queued on the rpc should have been notified about fail and should be 
concurrently
  * getting themselves ready to assign elsewhere.
  */
[email protected]
 public class ServerCrashProcedure
 extends StateMachineProcedure<MasterProcedureEnv, ServerCrashState>
 implements ServerProcedureInterface {

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/MasterProcedureManagerHost.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/MasterProcedureManagerHost.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/MasterProcedureManagerHost.java
index 222c933..736257f 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/MasterProcedureManagerHost.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/MasterProcedureManagerHost.java
@@ -19,17 +19,18 @@ package org.apache.hadoop.hbase.procedure;
 
 import java.io.IOException;
 import java.util.Hashtable;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.master.MetricsMaster;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.zookeeper.KeeperException;
 
 /**
  * Provides the globally barriered procedure framework and environment for
- * master oriented operations. {@link org.apache.hadoop.hbase.master.HMaster} 
+ * master oriented operations. {@link org.apache.hadoop.hbase.master.HMaster}
  * interacts with the loaded procedure manager through this class.
  */
[email protected]
 public class MasterProcedureManagerHost extends
     ProcedureManagerHost<MasterProcedureManager> {
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java
index 4c01eb8..5ae7a44 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java
@@ -18,21 +18,22 @@
 package org.apache.hadoop.hbase.procedure;
 
 import java.io.IOException;
-
 import org.apache.hadoop.conf.Configuration;
 import 
org.apache.hadoop.hbase.procedure.flush.RegionServerFlushTableProcedureManager;
 import org.apache.hadoop.hbase.regionserver.RegionServerServices;
 import 
org.apache.hadoop.hbase.regionserver.snapshot.RegionServerSnapshotManager;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  * Provides the globally barriered procedure framework and environment
- * for region server oriented operations. 
+ * for region server oriented operations.
  * {@link org.apache.hadoop.hbase.regionserver.HRegionServer} interacts
  * with the loaded procedure manager through this class.
  */
[email protected]
 public class RegionServerProcedureManagerHost extends
     ProcedureManagerHost<RegionServerProcedureManager> {
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
index 507e272..4b69244 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
@@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.procedure;
 import java.io.IOException;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CountDownLatch;
-
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionListener;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
 import org.apache.hadoop.hbase.errorhandling.TimeoutExceptionInjector;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -50,6 +50,7 @@ import org.slf4j.LoggerFactory;
  * There is a category of procedure (ex: online-snapshots), and a 
user-specified instance-specific
  * barrierName. (ex: snapshot121126).
  */
[email protected]
 abstract public class Subprocedure implements Callable<Void> {
   private static final Logger LOG = 
LoggerFactory.getLogger(Subprocedure.class);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java
index d81d7d3..891a34e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java
@@ -17,17 +17,18 @@
 package org.apache.hadoop.hbase.quotas;
 
 import java.io.IOException;
-
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  * A {@link SpaceQuotaSnapshotNotifier} which uses the hbase:quota table.
  */
[email protected]
 public class TableSpaceQuotaSnapshotNotifier implements 
SpaceQuotaSnapshotNotifier {
   private static final Logger LOG = 
LoggerFactory.getLogger(TableSpaceQuotaSnapshotNotifier.class);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java
index 294954c..0760df8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java
@@ -18,18 +18,20 @@ package org.apache.hadoop.hbase.quotas.policies;
 
 import java.io.IOException;
 import java.util.List;
-
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.quotas.SpaceLimitingException;
 import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A {@link SpaceViolationPolicyEnforcement} which can be treated as a 
singleton. When a quota is
  * not defined on a table or we lack quota information, we want to avoid 
creating a policy, keeping
  * this path fast.
  */
-public class MissingSnapshotViolationPolicyEnforcement extends 
AbstractViolationPolicyEnforcement {
[email protected]
+public final class MissingSnapshotViolationPolicyEnforcement
+  extends AbstractViolationPolicyEnforcement {
   private static final MissingSnapshotViolationPolicyEnforcement SINGLETON =
       new MissingSnapshotViolationPolicyEnforcement();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DumpRegionServerMetrics.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DumpRegionServerMetrics.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DumpRegionServerMetrics.java
index 2b07a64..4d49ea2 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DumpRegionServerMetrics.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DumpRegionServerMetrics.java
@@ -16,8 +16,6 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-import org.apache.hadoop.hbase.util.JSONBean;
-
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.io.StringWriter;
@@ -25,11 +23,14 @@ import java.lang.management.ManagementFactory;
 import javax.management.MBeanServer;
 import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
+import org.apache.hadoop.hbase.util.JSONBean;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Utility for doing JSON and MBeans.
  */
-public class DumpRegionServerMetrics {
[email protected]
+public final class DumpRegionServerMetrics {
   /**
    * Dump out a subset of regionserver mbeans only, not all of them, as json 
on System.out.
    */
@@ -57,4 +58,6 @@ public class DumpRegionServerMetrics {
     String str = dumpMetrics();
     System.out.println(str);
   }
+
+  private DumpRegionServerMetrics() {}
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SteppingSplitPolicy.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SteppingSplitPolicy.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SteppingSplitPolicy.java
index 9753080..84973db 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SteppingSplitPolicy.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SteppingSplitPolicy.java
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
[email protected]
 public class SteppingSplitPolicy extends 
IncreasingToUpperBoundRegionSplitPolicy {
   /**
    * @return flushSize * 2 if there's exactly one region of the table in 
question

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionRequest.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionRequest.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionRequest.java
index 03571d5..37b7059 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionRequest.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionRequest.java
@@ -20,11 +20,12 @@ package org.apache.hadoop.hbase.regionserver.compactions;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
-
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
+import org.apache.yetus.audience.InterfaceAudience;
 
 
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="EQ_DOESNT_OVERRIDE_EQUALS",
   justification="It is intended to use the same equal method as superclass")
[email protected]
 public class DateTieredCompactionRequest extends CompactionRequestImpl {
   private List<Long> boundaries;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java
index efbedc5..6814640 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java
@@ -21,19 +21,22 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.ExtendedCellBuilder;
 import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.wal.WALEdit;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import org.apache.hbase.thirdparty.com.google.common.base.Predicate;
+
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
 
[email protected]
 public class BulkLoadCellFilter {
   private static final Logger LOG = 
LoggerFactory.getLogger(BulkLoadCellFilter.class);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
index 22e8628..a960c31 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -66,7 +67,7 @@ import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.AtomicLongM
  * Arguments: --distributed    Polls each RS to dump information about the 
queue
  *            --hdfs           Reports HDFS usage by the replication queues 
(note: can be overestimated).
  */
-
[email protected]
 public class DumpReplicationQueues extends Configured implements Tool {
 
   private static final Logger LOG = 
LoggerFactory.getLogger(DumpReplicationQueues.class.getName());

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java
index 4c8a752..af6888c 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java
@@ -22,18 +22,19 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.replication.HBaseReplicationEndpoint;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
 import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
+
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 
 /**
@@ -41,6 +42,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminServic
  * single peer to replicate to per set of data to replicate. Also handles
  * keeping track of peer availability.
  */
[email protected]
 public class ReplicationSinkManager {
 
   private static final Logger LOG = 
LoggerFactory.getLogger(ReplicationSinkManager.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
index c2862de..62068fd 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
 import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * In a scenario of Replication based Disaster/Recovery, when hbase 
Master-Cluster crashes, this
@@ -47,6 +48,7 @@ import org.apache.hadoop.util.ToolRunner;
  * hbase org.apache.hadoop.hbase.replication.regionserver.ReplicationSyncUp
  * </pre>
  */
[email protected]
 public class ReplicationSyncUp extends Configured implements Tool {
 
   private static final long SLEEP_TIME = 10000;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
index f3e4853..b25b7e2 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
@@ -29,12 +29,12 @@ import java.util.SortedMap;
 import java.util.SortedSet;
 import java.util.TreeMap;
 import java.util.TreeSet;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.Tag;
 import 
org.apache.hadoop.hbase.regionserver.querymatcher.NewVersionBehaviorTracker;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -42,6 +42,7 @@ import org.slf4j.LoggerFactory;
  * Similar to MvccSensitiveTracker but tracks the visibility expression also 
before
  * deciding if a Cell can be considered deleted
  */
[email protected]
 public class VisibilityNewVersionBehaivorTracker extends 
NewVersionBehaviorTracker {
   private static final Logger LOG =
       LoggerFactory.getLogger(VisibilityNewVersionBehaivorTracker.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplication.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplication.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplication.java
index 6887c31..e39d601 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplication.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplication.java
@@ -19,6 +19,8 @@
 
 package org.apache.hadoop.hbase.security.visibility;
 
+import java.io.IOException;
+import java.util.Optional;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
@@ -26,9 +28,7 @@ import 
org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
 import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
-
-import java.io.IOException;
-import java.util.Optional;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A RegionServerObserver impl that provides the custom
@@ -37,6 +37,7 @@ import java.util.Optional;
  * replicated as string.  The value for the configuration should be
  * 
'org.apache.hadoop.hbase.security.visibility.VisibilityController$VisibilityReplication'.
  */
[email protected]
 public class VisibilityReplication implements RegionServerCoprocessor, 
RegionServerObserver {
   private Configuration conf;
   private VisibilityLabelService visibilityLabelService;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java
index b1ee66e..af9ce88 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.snapshot;
 
 import java.util.Arrays;
 import java.util.Locale;
-
 import org.apache.commons.cli.CommandLine;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
@@ -28,13 +27,13 @@ import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.client.SnapshotType;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This is a command line class that will snapshot a given table.
  */
[email protected]
 public class CreateSnapshot extends AbstractHBaseTool {
     private SnapshotType snapshotType = SnapshotType.FLUSH;
     private TableName tableName = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java
index 738ffc2..9311200 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java
@@ -18,6 +18,9 @@
  */
 package org.apache.hadoop.hbase.tool;
 
+import java.io.IOException;
+import java.util.Optional;
+import java.util.concurrent.atomic.AtomicLong;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
@@ -26,13 +29,10 @@ import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionObserver;
 import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
 import org.apache.hadoop.hbase.regionserver.OperationStatus;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.Optional;
-import java.util.concurrent.atomic.AtomicLong;
-
 /**
  * <p>
  * This coprocessor 'shallows' all the writes. It allows to test a pure
@@ -59,7 +59,9 @@ import java.util.concurrent.atomic.AtomicLong;
  * Will return:
  * 0 row(s) in 0.0050 seconds
  * </p>
+ * TODO: It needs tests
  */
[email protected]
 public class WriteSinkCoprocessor implements RegionCoprocessor, RegionObserver 
{
   private static final Logger LOG = 
LoggerFactory.getLogger(WriteSinkCoprocessor.class);
   private final AtomicLong ops = new AtomicLong();

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/util/GetJavaProperty.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/GetJavaProperty.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/GetJavaProperty.java
index b12c592..2d4de3b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/GetJavaProperty.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/GetJavaProperty.java
@@ -18,10 +18,13 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
 /**
  * A generic way for querying Java properties.
  */
-public class GetJavaProperty {
[email protected]
+public final class GetJavaProperty {
   public static void main(String args[]) {
     if (args.length == 0) {
       for (Object prop: System.getProperties().keySet()) {
@@ -33,4 +36,6 @@ public class GetJavaProperty {
       }
     }
   }
+
+  private GetJavaProperty() {}
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HFileArchiveUtil.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HFileArchiveUtil.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HFileArchiveUtil.java
index fb99cba..03ed373 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HFileArchiveUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HFileArchiveUtil.java
@@ -26,11 +26,13 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HStore;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Helper class for all utilities related to archival/retrieval of HFiles
  */
-public class HFileArchiveUtil {
[email protected]
+public final class HFileArchiveUtil {
   private HFileArchiveUtil() {
     // non-external instantiation - util class
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RollingStatCalculator.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RollingStatCalculator.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RollingStatCalculator.java
index 554d6f5..fb2a954 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RollingStatCalculator.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RollingStatCalculator.java
@@ -19,6 +19,8 @@
 
 package org.apache.hadoop.hbase.util;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
 /**
  * This class maintains mean and variation for any sequence of input provided 
to it.
  * It is initialized with number of rolling periods which basically means the 
number of past
@@ -30,6 +32,7 @@ package org.apache.hadoop.hbase.util;
  * from the start the statistics may behave like constants and may ignore 
short trends.
  * All operations are O(1) except the initialization which is O(N).
  */
[email protected]
 public class RollingStatCalculator {
   private double currentSum;
   private double currentSqrSum;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
index 8a43d17..769d480 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hbase.util;
 
 import java.io.IOException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -35,12 +34,14 @@ import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import 
org.apache.hadoop.hbase.replication.regionserver.RegionReplicaReplicationEndpoint;
 import org.apache.hadoop.hbase.zookeeper.ZKConfig;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  * Similar to {@link RegionReplicaUtil} but for the server side
  */
[email protected]
 public class ServerRegionReplicaUtil extends RegionReplicaUtil {
 
   private static final Logger LOG = 
LoggerFactory.getLogger(ServerRegionReplicaUtil.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ShutdownHookManager.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ShutdownHookManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ShutdownHookManager.java
index e731bd7..17da681 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ShutdownHookManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ShutdownHookManager.java
@@ -17,13 +17,15 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This class provides ShutdownHookManager shims for HBase to interact with 
the Hadoop 1.0.x and the
  * Hadoop 2.0+ series.
- * 
+ *
  * NOTE: No testing done against 0.22.x, or 0.21.x.
  */
[email protected]
 abstract public class ShutdownHookManager {
   private static ShutdownHookManager instance;
 
@@ -39,13 +41,13 @@ abstract public class ShutdownHookManager {
   }
 
   abstract public void addShutdownHook(Thread shutdownHook, int priority);
-  
+
   abstract public boolean removeShutdownHook(Runnable shutdownHook);
-    
+
   public static void affixShutdownHook(Thread shutdownHook, int priority) {
     instance.addShutdownHook(shutdownHook, priority);
   }
-  
+
   public static boolean deleteShutdownHook(Runnable shutdownHook) {
     return instance.removeShutdownHook(shutdownHook);
   }
@@ -56,14 +58,14 @@ abstract public class ShutdownHookManager {
     public void addShutdownHook(Thread shutdownHookThread, int priority) {
       Runtime.getRuntime().addShutdownHook(shutdownHookThread);
     }
-    
+
     @Override
     public boolean removeShutdownHook(Runnable shutdownHook) {
       Thread shutdownHookThread = null;
       if (!(shutdownHook instanceof Thread)) {
         shutdownHookThread = new Thread(shutdownHook);
       } else shutdownHookThread = (Thread) shutdownHook;
-      
+
       return Runtime.getRuntime().removeShutdownHook(shutdownHookThread);
     }
   }
@@ -72,7 +74,7 @@ abstract public class ShutdownHookManager {
     @Override
     public void addShutdownHook(Thread shutdownHookThread, int priority) {
       try {
-        Methods.call(shutdownHookManagerClass, 
+        Methods.call(shutdownHookManagerClass,
             Methods.call(shutdownHookManagerClass, null, "get", null, null),
             "addShutdownHook",
             new Class[] { Runnable.class, int.class },
@@ -81,12 +83,12 @@ abstract public class ShutdownHookManager {
         throw new RuntimeException("we could not use 
ShutdownHookManager.addShutdownHook", ex);
       }
     }
-    
+
     @Override
     public boolean removeShutdownHook(Runnable shutdownHook) {
       try {
         return (Boolean)
-        Methods.call(shutdownHookManagerClass, 
+        Methods.call(shutdownHookManagerClass,
             Methods.call(shutdownHookManagerClass, null, "get", null, null),
             "removeShutdownHook",
             new Class[] { Runnable.class },

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/util/SortedList.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/SortedList.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/SortedList.java
index 3f5576e..f896e55 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/SortedList.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/SortedList.java
@@ -26,6 +26,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.RandomAccess;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Simple sorted list implementation that uses {@link java.util.ArrayList} as
@@ -38,7 +39,7 @@ import java.util.RandomAccess;
  * time of invocation, so will not see any mutations to the collection during
  * their operation. Iterating over list elements manually using the
  * RandomAccess pattern involves multiple operations. For this to be safe get
- * a reference to the internal list first using get(). 
+ * a reference to the internal list first using get().
  * <p>
  * If constructed with a {@link java.util.Comparator}, the list will be sorted
  * using the comparator. Adding or changing an element using an index will
@@ -48,6 +49,7 @@ import java.util.RandomAccess;
  */
 
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="UG_SYNC_SET_UNSYNC_GET",
   justification="TODO: synchronization in here needs review!!!")
[email protected]
 public class SortedList<E> implements List<E>, RandomAccess {
   private volatile List<E> list;
   private final Comparator<? super E> comparator;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/util/YammerHistogramUtils.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/YammerHistogramUtils.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/YammerHistogramUtils.java
index 742a2ef..3070fb3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/YammerHistogramUtils.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/YammerHistogramUtils.java
@@ -21,11 +21,12 @@ package org.apache.hadoop.hbase.util;
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.Reservoir;
 import com.codahale.metrics.Snapshot;
-
 import java.lang.reflect.Constructor;
 import java.text.DecimalFormat;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /** Utility functions for working with Yammer Metrics. */
[email protected]
 public final class YammerHistogramUtils {
 
   // not for public consumption

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.java
index 4ca0e74..d5f5f53 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.util.hbck;
 
 import java.io.IOException;
 import java.util.Collection;
-
 import org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo;
 import org.apache.hadoop.hbase.util.HBaseFsck.TableInfo;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This interface provides callbacks for handling particular table integrity
@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.util.HBaseFsck.TableInfo;
  * and handling overlaps but currently preserves the older more specific error
  * condition codes.
  */
[email protected]
 public interface TableIntegrityErrorHandler {
 
   TableInfo getTableInfo();
@@ -48,7 +49,7 @@ public interface TableIntegrityErrorHandler {
    *    has an empty start key.
    */
   void handleRegionStartKeyNotEmpty(HbckInfo hi) throws IOException;
-  
+
   /**
    * Callback for handling case where a Table has a last region that does not
    * have an empty end key.
@@ -68,7 +69,7 @@ public interface TableIntegrityErrorHandler {
   /**
    * Callback for handling two regions that have the same start key.  This is
    * a specific case of a region overlap.
-   * @param hi1 one of the overlapping HbckInfo 
+   * @param hi1 one of the overlapping HbckInfo
    * @param hi2 the other overlapping HbckInfo
    */
   void handleDuplicateStartKeys(HbckInfo hi1, HbckInfo hi2) throws IOException;
@@ -96,7 +97,7 @@ public interface TableIntegrityErrorHandler {
    * Callback for handling a region hole between two keys.
    * @param holeStartKey key at the beginning of the region hole
    * @param holeEndKey key at the end of the region hole
-   
+
    */
   void handleHoleInRegionChain(byte[] holeStartKey, byte[] holeEndKey)
       throws IOException;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.java
index af379fd..96039b5 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.java
@@ -19,14 +19,15 @@ package org.apache.hadoop.hbase.util.hbck;
 
 import java.io.IOException;
 import java.util.Collection;
-
 import org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo;
 import org.apache.hadoop.hbase.util.HBaseFsck.TableInfo;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Simple implementation of TableIntegrityErrorHandler. Can be used as a base
  * class.
  */
[email protected]
 abstract public class TableIntegrityErrorHandlerImpl implements
     TableIntegrityErrorHandler {
   TableInfo ti;
@@ -53,7 +54,7 @@ abstract public class TableIntegrityErrorHandlerImpl 
implements
   @Override
   public void handleRegionStartKeyNotEmpty(HbckInfo hi) throws IOException {
   }
-  
+
   /**
    * {@inheritDoc}
    */

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/NettyAsyncFSWALConfigHelper.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/NettyAsyncFSWALConfigHelper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/NettyAsyncFSWALConfigHelper.java
index 0836b5d..12b63f5 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/NettyAsyncFSWALConfigHelper.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/NettyAsyncFSWALConfigHelper.java
@@ -22,6 +22,7 @@ import java.util.Map;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hbase.thirdparty.io.netty.channel.Channel;
@@ -31,7 +32,8 @@ import 
org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;
  * Helper class for passing netty event loop config to {@link 
AsyncFSWALProvider}.
  * @since 2.0.0
  */
-public class NettyAsyncFSWALConfigHelper {
[email protected]
+public final class NettyAsyncFSWALConfigHelper {
 
   private static final String EVENT_LOOP_CONFIG = 
"hbase.wal.async.event-loop.config";
 
@@ -59,4 +61,6 @@ public class NettyAsyncFSWALConfigHelper {
     }
     return EVENT_LOOP_CONFIG_MAP.get(name);
   }
+
+  private NettyAsyncFSWALConfigHelper() {}
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-shell/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml
index 58ef7ed..8eaefaa 100644
--- a/hbase-shell/pom.xml
+++ b/hbase-shell/pom.xml
@@ -116,6 +116,10 @@
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>findbugs-maven-plugin</artifactId>
       </plugin>
+      <plugin>
+        <groupId>net.revelc.code</groupId>
+        <artifactId>warbucks-maven-plugin</artifactId>
+      </plugin>
     </plugins>
     <!-- General Resources -->
     <pluginManagement>

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-spark-it/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-spark-it/pom.xml b/hbase-spark-it/pom.xml
index 2cac0f9..74de0a0 100644
--- a/hbase-spark-it/pom.xml
+++ b/hbase-spark-it/pom.xml
@@ -180,6 +180,10 @@
           <failOnViolation>true</failOnViolation>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>net.revelc.code</groupId>
+        <artifactId>warbucks-maven-plugin</artifactId>
+      </plugin>
     </plugins>
   </build>
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-spark/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-spark/pom.xml b/hbase-spark/pom.xml
index 3aeb470..05fd779 100644
--- a/hbase-spark/pom.xml
+++ b/hbase-spark/pom.xml
@@ -563,6 +563,27 @@
           <failOnViolation>true</failOnViolation>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>net.revelc.code</groupId>
+        <artifactId>warbucks-maven-plugin</artifactId>
+        <!-- TODO: remove the following config if 
https://issues.scala-lang.org/browse/SI-3600 is resolved -->
+        <!-- override the root config to add more filter -->
+        <configuration>
+          <ignoreRuleFailures>true</ignoreRuleFailures>
+          <rules>
+            <rule>
+              <!-- exclude the generated java files and package object-->
+              
<classPattern>(?!.*(.generated.|.tmpl.|\$|org.apache.hadoop.hbase.spark.hbase.package)).*</classPattern>
+              <includeTestClasses>false</includeTestClasses>
+              <includePublicClasses>true</includePublicClasses>
+              
<includePackagePrivateClasses>false</includePackagePrivateClasses>
+              <includeProtectedClasses>false</includeProtectedClasses>
+              <includePrivateClasses>false</includePrivateClasses>
+              
<classAnnotationPattern>org[.]apache[.]yetus[.]audience[.]InterfaceAudience.*</classAnnotationPattern>
+            </rule>
+          </rules>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
   <profiles>

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkDeleteExample.java
----------------------------------------------------------------------
diff --git 
a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkDeleteExample.java
 
b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkDeleteExample.java
index d7c424e..8cf2c7f 100644
--- 
a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkDeleteExample.java
+++ 
b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkDeleteExample.java
@@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.spark.example.hbasecontext;
 
 import java.util.ArrayList;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.TableName;
@@ -29,11 +28,13 @@ import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.Function;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This is a simple example of deleting records in HBase
  * with the bulkDelete function.
  */
[email protected]
 final public class JavaHBaseBulkDeleteExample {
 
   private JavaHBaseBulkDeleteExample() {}

Reply via email to