HBASE-20656 Validate pre-2.0 coprocessors against HBase 2.0+

Signed-off-by: Mike Drob <md...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c323e7bf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c323e7bf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c323e7bf

Branch: refs/heads/master
Commit: c323e7bfaa180dc9a9420ece2e191c21cd04b99a
Parents: eb13cdd
Author: Balazs Meszaros <balazs.mesza...@cloudera.com>
Authored: Wed May 23 13:49:19 2018 +0200
Committer: Mike Drob <md...@apache.org>
Committed: Mon Jun 11 10:26:58 2018 -0500

----------------------------------------------------------------------
 .../resources/hbase/checkstyle-suppressions.xml |    1 +
 .../hadoop/hbase/util/AbstractHBaseTool.java    |    7 +-
 .../hbase/coprocessor/BulkLoadObserver.java     |    8 +-
 .../hbase/tool/DataBlockEncodingValidator.java  |  108 ++
 .../hadoop/hbase/tool/PreUpgradeValidator.java  |  136 +--
 .../coprocessor/Branch1CoprocessorMethods.java  | 1137 ++++++++++++++++++
 .../tool/coprocessor/CoprocessorMethod.java     |   73 ++
 .../tool/coprocessor/CoprocessorMethods.java    |   66 +
 .../tool/coprocessor/CoprocessorValidator.java  |  247 ++++
 .../tool/coprocessor/CoprocessorViolation.java  |   56 +
 .../coprocessor/CurrentCoprocessorMethods.java  |   47 +
 .../coprocessor/CoprocessorValidatorTest.java   |  177 +++
 src/main/asciidoc/_chapters/ops_mgt.adoc        |   34 +-
 13 files changed, 2010 insertions(+), 87 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/c323e7bf/hbase-checkstyle/src/main/resources/hbase/checkstyle-suppressions.xml
----------------------------------------------------------------------
diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle-suppressions.xml b/hbase-checkstyle/src/main/resources/hbase/checkstyle-suppressions.xml
index 9feb555..ad79163 100644
--- a/hbase-checkstyle/src/main/resources/hbase/checkstyle-suppressions.xml
+++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle-suppressions.xml
@@ -38,4 +38,5 @@
   <suppress checks="InterfaceIsTypeCheck" files=".*/src/main/.*\.java"/>
   <suppress checks="EmptyBlockCheck" files="TBoundedThreadPoolServer.java"/>
   <suppress checks="EqualsHashCode" files="StartcodeAgnosticServerName.java"/>
+  <suppress checks="MethodLength" files="Branch1CoprocessorMethods.java"/>
 </suppressions>

http://git-wip-us.apache.org/repos/asf/hbase/blob/c323e7bf/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
index 1dd7201..b454884 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
@@ -22,7 +22,6 @@ import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
 
-import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.util.Tool;
@@ -46,9 +45,9 @@ import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
  * command-line argument parsing.
  */
 @InterfaceAudience.Private
-public abstract class AbstractHBaseTool implements Tool, Configurable {
-  protected static final int EXIT_SUCCESS = 0;
-  protected static final int EXIT_FAILURE = 1;
+public abstract class AbstractHBaseTool implements Tool {
+  public static final int EXIT_SUCCESS = 0;
+  public static final int EXIT_FAILURE = 1;
 
   public static final String SHORT_HELP_OPTION = "h";
   public static final String LONG_HELP_OPTION = "help";

http://git-wip-us.apache.org/repos/asf/hbase/blob/c323e7bf/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BulkLoadObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BulkLoadObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BulkLoadObserver.java
index 25e6522..b69a727 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BulkLoadObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BulkLoadObserver.java
@@ -21,13 +21,9 @@ package org.apache.hadoop.hbase.coprocessor;
 
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest;
 
 /**
 * Coprocessors implement this interface to observe and mediate bulk load operations.
@@ -55,7 +51,7 @@ public interface BulkLoadObserver {
      * It can't bypass the default action, e.g., ctx.bypass() won't have effect.
      * If you need to get the region or table name, get it from the
      * <code>ctx</code> as follows: <code>code>ctx.getEnvironment().getRegion()</code>. Use
-      * getRegionInfo to fetch the encodedName and use getTabldDescriptor() to get the tableName.
+      * getRegionInfo to fetch the encodedName and use getTableDescriptor() to get the tableName.
      * @param ctx the environment to interact with the framework and master
      */
    default void prePrepareBulkLoad(ObserverContext<RegionCoprocessorEnvironment> ctx)
@@ -66,7 +62,7 @@ public interface BulkLoadObserver {
      * It can't bypass the default action, e.g., ctx.bypass() won't have effect.
      * If you need to get the region or table name, get it from the
      * <code>ctx</code> as follows: <code>code>ctx.getEnvironment().getRegion()</code>. Use
-      * getRegionInfo to fetch the encodedName and use getTabldDescriptor() to get the tableName.
+      * getRegionInfo to fetch the encodedName and use getTableDescriptor() to get the tableName.
      * @param ctx the environment to interact with the framework and master
      */
    default void preCleanupBulkLoad(ObserverContext<RegionCoprocessorEnvironment> ctx)

http://git-wip-us.apache.org/repos/asf/hbase/blob/c323e7bf/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/DataBlockEncodingValidator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/DataBlockEncodingValidator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/DataBlockEncodingValidator.java
new file mode 100644
index 0000000..e72521b
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/DataBlockEncodingValidator.java
@@ -0,0 +1,108 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.tool;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.util.AbstractHBaseTool;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
+
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
+public class DataBlockEncodingValidator extends AbstractHBaseTool {
+
+  private static final Logger LOG = LoggerFactory.getLogger(DataBlockEncodingValidator.class);
+  private static final byte[] DATA_BLOCK_ENCODING = Bytes.toBytes("DATA_BLOCK_ENCODING");
+
+  /**
+   * Check DataBlockEncodings of column families are compatible.
+   *
+   * @return number of column families with incompatible DataBlockEncoding
+   * @throws IOException if a remote or network exception occurs
+   */
+  private int validateDBE() throws IOException {
+    int incompatibilities = 0;
+
+    LOG.info("Validating Data Block Encodings");
+
+    try (Connection connection = ConnectionFactory.createConnection(getConf());
+        Admin admin = connection.getAdmin()) {
+      List<TableDescriptor> tableDescriptors = admin.listTableDescriptors();
+      String encoding = "";
+
+      for (TableDescriptor td : tableDescriptors) {
+        ColumnFamilyDescriptor[] columnFamilies = td.getColumnFamilies();
+        for (ColumnFamilyDescriptor cfd : columnFamilies) {
+          try {
+            encoding = Bytes.toString(cfd.getValue(DATA_BLOCK_ENCODING));
+            // IllegalArgumentException will be thrown if encoding is incompatible with 2.0
+            DataBlockEncoding.valueOf(encoding);
+          } catch (IllegalArgumentException e) {
+            incompatibilities++;
+            LOG.warn("Incompatible DataBlockEncoding for table: {}, cf: {}, encoding: {}",
+                td.getTableName().getNameAsString(), cfd.getNameAsString(), encoding);
+          }
+        }
+      }
+    }
+
+    if (incompatibilities > 0) {
+      LOG.warn("There are {} column families with incompatible Data Block Encodings. Do not "
+          + "upgrade until these encodings are converted to a supported one.", incompatibilities);
+      LOG.warn("Check http://hbase.apache.org/book.html#upgrade2.0.prefix-tree.removed "
+          + "for instructions.");
+    } else {
+      LOG.info("The used Data Block Encodings are compatible with HBase 2.0.");
+    }
+
+    return incompatibilities;
+  }
+
+  @Override
+  protected void printUsage() {
+    String header = "hbase " + PreUpgradeValidator.TOOL_NAME + " " +
+        PreUpgradeValidator.VALIDATE_DBE_NAME;
+    printUsage(header, null, "");
+  }
+
+  @Override
+  protected void addOptions() {
+  }
+
+  @Override
+  protected void processOptions(CommandLine cmd) {
+  }
+
+  @Override
+  protected int doWork() throws Exception {
+    return (validateDBE() == 0) ? EXIT_SUCCESS : EXIT_FAILURE;
+  }
+}
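
The heart of the check above is simply whether the stored DATA_BLOCK_ENCODING value still names a member of the 2.0 DataBlockEncoding enum; an encoding dropped in 2.0 (notably PREFIX_TREE, per the referenced upgrade2.0.prefix-tree.removed section) makes valueOf() throw IllegalArgumentException. A minimal standalone sketch of the same idea, assuming only an HBase 2.x hbase-common jar on the classpath (the class name and sample values below are made up for illustration):

    import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

    public class DbeCheckSketch {
      public static void main(String[] args) {
        // FAST_DIFF still exists in 2.0; PREFIX_TREE is assumed removed, so valueOf() rejects it.
        String[] encodings = { "FAST_DIFF", "PREFIX_TREE" };
        for (String encoding : encodings) {
          try {
            DataBlockEncoding.valueOf(encoding);
            System.out.println(encoding + ": compatible with HBase 2.0");
          } catch (IllegalArgumentException e) {
            System.out.println(encoding + ": NOT compatible with HBase 2.0");
          }
        }
      }
    }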

http://git-wip-us.apache.org/repos/asf/hbase/blob/c323e7bf/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/PreUpgradeValidator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/PreUpgradeValidator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/PreUpgradeValidator.java
index 6fe5a92..a3c505e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/PreUpgradeValidator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/PreUpgradeValidator.java
@@ -18,111 +18,99 @@
  */
 package org.apache.hadoop.hbase.tool;
 
-import java.io.IOException;
-import java.util.List;
+import java.util.Arrays;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.TableDescriptor;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.tool.coprocessor.CoprocessorValidator;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
-import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
-
 /**
  * Tool for validating that cluster can be upgraded from HBase 1.x to 2.0
  * <p>
  * Available validations:
  * <ul>
- * <li>all: Run all pre-upgrade validations</li>
- * <li>validateDBE: Check Data Block Encoding for column families</li>
+ * <li>validate-cp: Validates Co-processors compatibility</li>
+ * <li>validate-dbe: Check Data Block Encoding for column families</li>
  * </ul>
  * </p>
  */
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
-public class PreUpgradeValidator extends AbstractHBaseTool {
-
-  public static final String NAME = "pre-upgrade";
-  private static final Logger LOG = LoggerFactory.getLogger(PreUpgradeValidator.class);
-  private static final byte[] DATA_BLOCK_ENCODING = Bytes.toBytes("DATA_BLOCK_ENCODING");
-  private boolean validateAll;
-  private boolean validateDBE;
+public class PreUpgradeValidator implements Tool {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(PreUpgradeValidator.class);
 
-  /**
-   * Check DataBlockEncodings of column families are compatible.
-   *
-   * @return number of column families with incompatible DataBlockEncoding
-   * @throws IOException if a remote or network exception occurs
-   */
-  private int validateDBE() throws IOException {
-    int incompatibilities = 0;
+  public static final String TOOL_NAME = "pre-upgrade";
+  public static final String VALIDATE_CP_NAME = "validate-cp";
+  public static final String VALIDATE_DBE_NAME = "validate-dbe";
 
-    LOG.info("Validating Data Block Encodings");
+  private Configuration configuration;
 
-    try (Connection connection = ConnectionFactory.createConnection(getConf());
-        Admin admin = connection.getAdmin()) {
-      List<TableDescriptor> tableDescriptors = admin.listTableDescriptors();
-      String encoding = "";
-
-      for (TableDescriptor td : tableDescriptors) {
-        ColumnFamilyDescriptor[] columnFamilies = td.getColumnFamilies();
-        for (ColumnFamilyDescriptor cfd : columnFamilies) {
-          try {
-            encoding = Bytes.toString(cfd.getValue(DATA_BLOCK_ENCODING));
-            // IllegalArgumentException will be thrown if encoding is incompatible with 2.0
-            DataBlockEncoding.valueOf(encoding);
-          } catch (IllegalArgumentException e) {
-            incompatibilities++;
-            LOG.warn("Incompatible DataBlockEncoding for table: {}, cf: {}, encoding: {}",
-                td.getTableName().getNameAsString(), cfd.getNameAsString(), encoding);
-          }
-        }
-      }
-    }
-
-    if (incompatibilities > 0) {
-      LOG.warn("There are {} column families with incompatible Data Block Encodings. Do not "
-          + "upgrade until these encodings are converted to a supported one.", incompatibilities);
-      LOG.warn("Check http://hbase.apache.org/book.html#upgrade2.0.prefix-tree.removed "
-          + "for instructions.");
-    } else {
-      LOG.info("The used Data Block Encodings are compatible with HBase 2.0.");
-    }
-    return incompatibilities;
+  @Override
+  public Configuration getConf() {
+    return configuration;
   }
 
   @Override
-  protected void addOptions() {
-    addOptNoArg("all", "Run all pre-upgrade validations");
-    addOptNoArg("validateDBE", "Validate DataBlockEncodings are compatible");
+  public void setConf(Configuration conf) {
+    this.configuration = conf;
   }
 
-  @Override
-  protected void processOptions(CommandLine cmd) {
-    validateAll = cmd.hasOption("all");
-    validateDBE = cmd.hasOption("validateDBE");
+  private void printUsage() {
+    System.out.println("usage: hbase " + TOOL_NAME + " command ...");
+    System.out.println("Available commands:");
+    System.out.printf(" %-12s Validate co-processors are compatible with HBase%n",
+        VALIDATE_CP_NAME);
+    System.out.printf(" %-12s Validate DataBlockEncoding are compatible on the cluster%n",
+        VALIDATE_DBE_NAME);
+    System.out.println("For further information, please use command -h");
   }
 
   @Override
-  protected int doWork() throws Exception {
-    boolean validationFailed = false;
-    if (validateDBE || validateAll) {
-      if (validateDBE() > 0) {
-        validationFailed = true;
-      }
+  public int run(String[] args) throws Exception {
+    if (args.length == 0) {
+      printUsage();
+      return AbstractHBaseTool.EXIT_FAILURE;
+    }
+
+    Tool tool;
+
+    switch (args[0]) {
+      case VALIDATE_CP_NAME:
+        tool = new CoprocessorValidator();
+        break;
+      case VALIDATE_DBE_NAME:
+        tool = new DataBlockEncodingValidator();
+        break;
+      case "-h":
+        printUsage();
+        return AbstractHBaseTool.EXIT_FAILURE;
+      default:
+        System.err.println("Unknown command: " + args[0]);
+        printUsage();
+        return AbstractHBaseTool.EXIT_FAILURE;
     }
 
-    return validationFailed ? 1 : 0;
+    tool.setConf(getConf());
+    return tool.run(Arrays.copyOfRange(args, 1, args.length));
   }
 
   public static void main(String[] args) {
-    new PreUpgradeValidator().doStaticMain(args);
+    int ret;
+
+    try {
+      ret = ToolRunner.run(HBaseConfiguration.create(), new PreUpgradeValidator(), args);
+    } catch (Exception e) {
+      LOG.error("Error running command-line tool", e);
+      ret = AbstractHBaseTool.EXIT_FAILURE;
+    }
+
+    System.exit(ret);
   }
 }
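
With this rewrite, PreUpgradeValidator is only a dispatcher: it picks the sub-tool named by args[0] and hands it the remaining arguments through the standard Tool/ToolRunner machinery, which is also what its main() does. A rough sketch of driving it programmatically, assuming HBase 2.x server and Hadoop classes on the classpath (roughly equivalent to running "hbase pre-upgrade validate-dbe" once the tool is wired into the launcher script; the wrapper class name is made up for illustration):

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.tool.PreUpgradeValidator;
    import org.apache.hadoop.util.ToolRunner;

    public class RunPreUpgradeCheck {
      public static void main(String[] args) throws Exception {
        // Dispatches to DataBlockEncodingValidator; a non-zero exit code means
        // at least one column family still uses an incompatible encoding.
        int exitCode = ToolRunner.run(HBaseConfiguration.create(),
            new PreUpgradeValidator(),
            new String[] { PreUpgradeValidator.VALIDATE_DBE_NAME });
        System.out.println("validate-dbe finished with exit code " + exitCode);
      }
    }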

http://git-wip-us.apache.org/repos/asf/hbase/blob/c323e7bf/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/Branch1CoprocessorMethods.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/Branch1CoprocessorMethods.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/Branch1CoprocessorMethods.java
new file mode 100644
index 0000000..0f5d829
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/Branch1CoprocessorMethods.java
@@ -0,0 +1,1137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.tool.coprocessor;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+@InterfaceAudience.Private
+public class Branch1CoprocessorMethods extends CoprocessorMethods {
+  public Branch1CoprocessorMethods() {
+    addMethods();
+  }
+
+  /*
+   * This list of methods was generated from HBase 1.4.4.
+   */
+  private void addMethods() {
+    /* BulkLoadObserver */
+
+    addMethod("prePrepareBulkLoad",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest");
+
+    addMethod("preCleanupBulkLoad",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest");
+
+    /* EndpointObserver */
+
+    addMethod("postEndpointInvocation",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "com.google.protobuf.Service",
+        "java.lang.String",
+        "com.google.protobuf.Message",
+        "com.google.protobuf.Message.Builder");
+
+    addMethod("preEndpointInvocation",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "com.google.protobuf.Service",
+        "java.lang.String",
+        "com.google.protobuf.Message");
+
+    /* MasterObserver */
+
+    addMethod("preCreateTable",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HTableDescriptor",
+        "org.apache.hadoop.hbase.HRegionInfo[]");
+
+    addMethod("postCreateTable",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HTableDescriptor",
+        "org.apache.hadoop.hbase.HRegionInfo[]");
+
+    addMethod("preDeleteTable",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("postDeleteTable",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("preDeleteTableHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("preMove",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo",
+        "org.apache.hadoop.hbase.ServerName",
+        "org.apache.hadoop.hbase.ServerName");
+
+    addMethod("preCreateTableHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HTableDescriptor",
+        "org.apache.hadoop.hbase.HRegionInfo[]");
+
+    addMethod("postCreateTableHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HTableDescriptor",
+        "org.apache.hadoop.hbase.HRegionInfo[]");
+
+    addMethod("postMove",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo",
+        "org.apache.hadoop.hbase.ServerName",
+        "org.apache.hadoop.hbase.ServerName");
+
+    addMethod("postDeleteTableHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("preTruncateTable",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("postTruncateTable",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("preTruncateTableHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("postTruncateTableHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("preModifyTable",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.HTableDescriptor");
+
+    addMethod("postModifyTable",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.HTableDescriptor");
+
+    addMethod("preModifyTableHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.HTableDescriptor");
+
+    addMethod("postModifyTableHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.HTableDescriptor");
+
+    addMethod("preAddColumn",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.HColumnDescriptor");
+
+    addMethod("postAddColumn",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.HColumnDescriptor");
+
+    addMethod("preAddColumnHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.HColumnDescriptor");
+
+    addMethod("postAddColumnHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.HColumnDescriptor");
+
+    addMethod("preModifyColumn",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.HColumnDescriptor");
+
+    addMethod("postModifyColumn",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.HColumnDescriptor");
+
+    addMethod("preModifyColumnHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.HColumnDescriptor");
+
+    addMethod("postModifyColumnHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.HColumnDescriptor");
+
+    addMethod("preDeleteColumn",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "byte[]");
+
+    addMethod("postDeleteColumn",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "byte[]");
+
+    addMethod("preDeleteColumnHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "byte[]");
+
+    addMethod("postDeleteColumnHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "byte[]");
+
+    addMethod("preEnableTable",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("postEnableTable",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("preEnableTableHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("postEnableTableHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("preDisableTable",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("postDisableTable",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("preDisableTableHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("postDisableTableHandler",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("preAbortProcedure",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.procedure2.ProcedureExecutor",
+        "long");
+
+    addMethod("postAbortProcedure",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("preListProcedures",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postListProcedures",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List");
+
+    addMethod("preAssign",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo");
+
+    addMethod("postAssign",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo");
+
+    addMethod("preUnassign",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo",
+        "boolean");
+
+    addMethod("postUnassign",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo",
+        "boolean");
+
+    addMethod("preRegionOffline",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo");
+
+    addMethod("postRegionOffline",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo");
+
+    addMethod("preBalance",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postBalance",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List");
+
+    addMethod("preSetSplitOrMergeEnabled",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "boolean",
+        "org.apache.hadoop.hbase.client.Admin.MasterSwitchType");
+
+    addMethod("postSetSplitOrMergeEnabled",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "boolean",
+        "org.apache.hadoop.hbase.client.Admin.MasterSwitchType");
+
+    addMethod("preBalanceSwitch",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "boolean");
+
+    addMethod("postBalanceSwitch",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "boolean",
+        "boolean");
+
+    addMethod("preShutdown",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("preStopMaster",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postStartMaster",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("preMasterInitialization",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("preSnapshot",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription",
+        "org.apache.hadoop.hbase.HTableDescriptor");
+
+    addMethod("postSnapshot",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription",
+        "org.apache.hadoop.hbase.HTableDescriptor");
+
+    addMethod("preListSnapshot",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription");
+
+    addMethod("postListSnapshot",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription");
+
+    addMethod("preCloneSnapshot",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription",
+        "org.apache.hadoop.hbase.HTableDescriptor");
+
+    addMethod("postCloneSnapshot",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription",
+        "org.apache.hadoop.hbase.HTableDescriptor");
+
+    addMethod("preRestoreSnapshot",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription",
+        "org.apache.hadoop.hbase.HTableDescriptor");
+
+    addMethod("postRestoreSnapshot",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription",
+        "org.apache.hadoop.hbase.HTableDescriptor");
+
+    addMethod("preDeleteSnapshot",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription");
+
+    addMethod("postDeleteSnapshot",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription");
+
+    addMethod("preGetTableDescriptors",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List",
+        "java.util.List");
+
+    addMethod("preGetTableDescriptors",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List",
+        "java.util.List",
+        "java.lang.String");
+
+    addMethod("postGetTableDescriptors",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List",
+        "java.util.List",
+        "java.lang.String");
+
+    addMethod("postGetTableDescriptors",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List");
+
+    addMethod("preGetTableNames",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List",
+        "java.lang.String");
+
+    addMethod("postGetTableNames",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List",
+        "java.lang.String");
+
+    addMethod("preCreateNamespace",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.NamespaceDescriptor");
+
+    addMethod("postCreateNamespace",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.NamespaceDescriptor");
+
+    addMethod("preDeleteNamespace",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String");
+
+    addMethod("postDeleteNamespace",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String");
+
+    addMethod("preModifyNamespace",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.NamespaceDescriptor");
+
+    addMethod("postModifyNamespace",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.NamespaceDescriptor");
+
+    addMethod("preGetNamespaceDescriptor",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String");
+
+    addMethod("postGetNamespaceDescriptor",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.NamespaceDescriptor");
+
+    addMethod("preListNamespaceDescriptors",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List");
+
+    addMethod("postListNamespaceDescriptors",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List");
+
+    addMethod("preTableFlush",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("postTableFlush",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName");
+
+    addMethod("preSetUserQuota",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String",
+        "java.lang.String",
+        "org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas");
+
+    addMethod("preSetUserQuota",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas");
+
+    addMethod("preSetUserQuota",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String",
+        "org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas");
+
+    addMethod("postSetUserQuota",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String",
+        "java.lang.String",
+        "org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas");
+
+    addMethod("postSetUserQuota",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas");
+
+    addMethod("postSetUserQuota",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String",
+        "org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas");
+
+    addMethod("preSetTableQuota",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas");
+
+    addMethod("postSetTableQuota",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.TableName",
+        "org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas");
+
+    addMethod("preSetNamespaceQuota",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String",
+        "org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas");
+
+    addMethod("postSetNamespaceQuota",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String",
+        "org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas");
+
+    addMethod("preDispatchMerge",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo",
+        "org.apache.hadoop.hbase.HRegionInfo");
+
+    addMethod("postDispatchMerge",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo",
+        "org.apache.hadoop.hbase.HRegionInfo");
+
+    addMethod("preGetClusterStatus",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postGetClusterStatus",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.ClusterStatus");
+
+    addMethod("preClearDeadServers",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postClearDeadServers",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List",
+        "java.util.List");
+
+    addMethod("preMoveServers",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.Set",
+        "java.lang.String");
+
+    addMethod("postMoveServers",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.Set",
+        "java.lang.String");
+
+    addMethod("preMoveTables",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.Set",
+        "java.lang.String");
+
+    addMethod("postMoveTables",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.Set",
+        "java.lang.String");
+
+    addMethod("preMoveServersAndTables",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.Set",
+        "java.util.Set",
+        "java.lang.String");
+
+    addMethod("postMoveServersAndTables",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.Set",
+        "java.util.Set",
+        "java.lang.String");
+
+    addMethod("preAddRSGroup",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String");
+
+    addMethod("postAddRSGroup",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String");
+
+    addMethod("preRemoveRSGroup",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String");
+
+    addMethod("postRemoveRSGroup",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String");
+
+    addMethod("preRemoveServers",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.Set");
+
+    addMethod("postRemoveServers",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.Set");
+
+    addMethod("preBalanceRSGroup",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String");
+
+    addMethod("postBalanceRSGroup",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.lang.String",
+        "boolean");
+
+    /* RegionObserver */
+
+    addMethod("preOpen",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postOpen",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postLogReplay",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("preFlushScannerOpen",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "org.apache.hadoop.hbase.regionserver.KeyValueScanner",
+        "org.apache.hadoop.hbase.regionserver.InternalScanner",
+        "long");
+
+    addMethod("preFlushScannerOpen",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "org.apache.hadoop.hbase.regionserver.KeyValueScanner",
+        "org.apache.hadoop.hbase.regionserver.InternalScanner");
+
+    addMethod("preFlush",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "org.apache.hadoop.hbase.regionserver.InternalScanner");
+
+    addMethod("preFlush",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postFlush",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "org.apache.hadoop.hbase.regionserver.StoreFile");
+
+    addMethod("postFlush",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("preCompactSelection",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "java.util.List");
+
+    addMethod("preCompactSelection",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "java.util.List",
+        "org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest");
+
+    addMethod("postCompactSelection",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "com.google.common.collect.ImmutableList");
+
+    addMethod("postCompactSelection",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "com.google.common.collect.ImmutableList",
+        "org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest");
+
+    addMethod("preCompact",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "org.apache.hadoop.hbase.regionserver.InternalScanner",
+        "org.apache.hadoop.hbase.regionserver.ScanType");
+
+    addMethod("preCompact",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "org.apache.hadoop.hbase.regionserver.InternalScanner",
+        "org.apache.hadoop.hbase.regionserver.ScanType",
+        "org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest");
+
+    addMethod("preClose",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "boolean");
+
+    addMethod("preCompactScannerOpen",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "java.util.List",
+        "org.apache.hadoop.hbase.regionserver.ScanType",
+        "long",
+        "org.apache.hadoop.hbase.regionserver.InternalScanner");
+
+    addMethod("preCompactScannerOpen",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "java.util.List",
+        "org.apache.hadoop.hbase.regionserver.ScanType",
+        "long",
+        "org.apache.hadoop.hbase.regionserver.InternalScanner",
+        "org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest",
+        "long");
+
+    addMethod("preCompactScannerOpen",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "java.util.List",
+        "org.apache.hadoop.hbase.regionserver.ScanType",
+        "long",
+        "org.apache.hadoop.hbase.regionserver.InternalScanner",
+        "org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest");
+
+    addMethod("postCompact",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "org.apache.hadoop.hbase.regionserver.StoreFile");
+
+    addMethod("postCompact",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "org.apache.hadoop.hbase.regionserver.StoreFile",
+        "org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest");
+
+    addMethod("preSplit",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "byte[]");
+
+    addMethod("preSplit",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postSplit",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Region",
+        "org.apache.hadoop.hbase.regionserver.Region");
+
+    addMethod("preSplitBeforePONR",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "byte[]",
+        "java.util.List");
+
+    addMethod("preSplitAfterPONR",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("preRollBackSplit",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postRollBackSplit",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postCompleteSplit",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postClose",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "boolean");
+
+    addMethod("preGetClosestRowBefore",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "byte[]",
+        "byte[]",
+        "org.apache.hadoop.hbase.client.Result");
+
+    addMethod("postGetClosestRowBefore",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "byte[]",
+        "byte[]",
+        "org.apache.hadoop.hbase.client.Result");
+
+    addMethod("preGetOp",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Get",
+        "java.util.List");
+
+    addMethod("postGetOp",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Get",
+        "java.util.List");
+
+    addMethod("preExists",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Get",
+        "boolean");
+
+    addMethod("postExists",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Get",
+        "boolean");
+
+    addMethod("prePut",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Put",
+        "org.apache.hadoop.hbase.regionserver.wal.WALEdit",
+        "org.apache.hadoop.hbase.client.Durability");
+
+    addMethod("postPut",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Put",
+        "org.apache.hadoop.hbase.regionserver.wal.WALEdit",
+        "org.apache.hadoop.hbase.client.Durability");
+
+    addMethod("preDelete",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Delete",
+        "org.apache.hadoop.hbase.regionserver.wal.WALEdit",
+        "org.apache.hadoop.hbase.client.Durability");
+
+    addMethod("prePrepareTimeStampForDeleteVersion",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Mutation",
+        "org.apache.hadoop.hbase.Cell",
+        "byte[]",
+        "org.apache.hadoop.hbase.client.Get");
+
+    addMethod("postDelete",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Delete",
+        "org.apache.hadoop.hbase.regionserver.wal.WALEdit",
+        "org.apache.hadoop.hbase.client.Durability");
+
+    addMethod("preBatchMutate",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress");
+
+    addMethod("postBatchMutate",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress");
+
+    addMethod("postStartRegionOperation",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Region.Operation");
+
+    addMethod("postCloseRegionOperation",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Region.Operation");
+
+    addMethod("postBatchMutateIndispensably",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress",
+        "boolean");
+
+    addMethod("preCheckAndPut",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "byte[]",
+        "byte[]",
+        "byte[]",
+        "org.apache.hadoop.hbase.filter.CompareFilter.CompareOp",
+        "org.apache.hadoop.hbase.filter.ByteArrayComparable",
+        "org.apache.hadoop.hbase.client.Put",
+        "boolean");
+
+    addMethod("preCheckAndPutAfterRowLock",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "byte[]",
+        "byte[]",
+        "byte[]",
+        "org.apache.hadoop.hbase.filter.CompareFilter.CompareOp",
+        "org.apache.hadoop.hbase.filter.ByteArrayComparable",
+        "org.apache.hadoop.hbase.client.Put",
+        "boolean");
+
+    addMethod("postCheckAndPut",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "byte[]",
+        "byte[]",
+        "byte[]",
+        "org.apache.hadoop.hbase.filter.CompareFilter.CompareOp",
+        "org.apache.hadoop.hbase.filter.ByteArrayComparable",
+        "org.apache.hadoop.hbase.client.Put",
+        "boolean");
+
+    addMethod("preCheckAndDelete",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "byte[]",
+        "byte[]",
+        "byte[]",
+        "org.apache.hadoop.hbase.filter.CompareFilter.CompareOp",
+        "org.apache.hadoop.hbase.filter.ByteArrayComparable",
+        "org.apache.hadoop.hbase.client.Delete",
+        "boolean");
+
+    addMethod("preCheckAndDeleteAfterRowLock",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "byte[]",
+        "byte[]",
+        "byte[]",
+        "org.apache.hadoop.hbase.filter.CompareFilter.CompareOp",
+        "org.apache.hadoop.hbase.filter.ByteArrayComparable",
+        "org.apache.hadoop.hbase.client.Delete",
+        "boolean");
+
+    addMethod("postCheckAndDelete",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "byte[]",
+        "byte[]",
+        "byte[]",
+        "org.apache.hadoop.hbase.filter.CompareFilter.CompareOp",
+        "org.apache.hadoop.hbase.filter.ByteArrayComparable",
+        "org.apache.hadoop.hbase.client.Delete",
+        "boolean");
+
+    addMethod("preIncrementColumnValue",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "byte[]",
+        "byte[]",
+        "byte[]",
+        "long",
+        "boolean");
+
+    addMethod("postIncrementColumnValue",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "byte[]",
+        "byte[]",
+        "byte[]",
+        "long",
+        "boolean",
+        "long");
+
+    addMethod("preAppend",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Append");
+
+    addMethod("preAppendAfterRowLock",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Append");
+
+    addMethod("postAppend",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Append",
+        "org.apache.hadoop.hbase.client.Result");
+
+    addMethod("preIncrement",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Increment");
+
+    addMethod("preIncrementAfterRowLock",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Increment");
+
+    addMethod("postIncrement",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Increment",
+        "org.apache.hadoop.hbase.client.Result");
+
+    addMethod("preScannerOpen",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Scan",
+        "org.apache.hadoop.hbase.regionserver.RegionScanner");
+
+    addMethod("preStoreScannerOpen",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Store",
+        "org.apache.hadoop.hbase.client.Scan",
+        "java.util.NavigableSet",
+        "org.apache.hadoop.hbase.regionserver.KeyValueScanner");
+
+    addMethod("postScannerOpen",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.client.Scan",
+        "org.apache.hadoop.hbase.regionserver.RegionScanner");
+
+    addMethod("preScannerNext",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.InternalScanner",
+        "java.util.List",
+        "int",
+        "boolean");
+
+    addMethod("postScannerNext",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.InternalScanner",
+        "java.util.List",
+        "int",
+        "boolean");
+
+    addMethod("postScannerFilterRow",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.InternalScanner",
+        "byte[]",
+        "int",
+        "short",
+        "boolean");
+
+    addMethod("preScannerClose",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.InternalScanner");
+
+    addMethod("postScannerClose",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.InternalScanner");
+
+    addMethod("preWALRestore",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo",
+        "org.apache.hadoop.hbase.regionserver.wal.HLogKey",
+        "org.apache.hadoop.hbase.regionserver.wal.WALEdit");
+
+    addMethod("preWALRestore",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo",
+        "org.apache.hadoop.hbase.wal.WALKey",
+        "org.apache.hadoop.hbase.regionserver.wal.WALEdit");
+
+    addMethod("postWALRestore",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo",
+        "org.apache.hadoop.hbase.regionserver.wal.HLogKey",
+        "org.apache.hadoop.hbase.regionserver.wal.WALEdit");
+
+    addMethod("postWALRestore",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo",
+        "org.apache.hadoop.hbase.wal.WALKey",
+        "org.apache.hadoop.hbase.regionserver.wal.WALEdit");
+
+    addMethod("preBulkLoadHFile",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List");
+
+    addMethod("preCommitStoreFile",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "byte[]",
+        "java.util.List");
+
+    addMethod("postCommitStoreFile",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "byte[]",
+        "org.apache.hadoop.fs.Path",
+        "org.apache.hadoop.fs.Path");
+
+    addMethod("postBulkLoadHFile",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List",
+        "boolean");
+
+    addMethod("preStoreFileReaderOpen",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.fs.FileSystem",
+        "org.apache.hadoop.fs.Path",
+        "org.apache.hadoop.hbase.io.FSDataInputStreamWrapper",
+        "long",
+        "org.apache.hadoop.hbase.io.hfile.CacheConfig",
+        "org.apache.hadoop.hbase.io.Reference",
+        "org.apache.hadoop.hbase.regionserver.StoreFile.Reader");
+
+    addMethod("postStoreFileReaderOpen",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.fs.FileSystem",
+        "org.apache.hadoop.fs.Path",
+        "org.apache.hadoop.hbase.io.FSDataInputStreamWrapper",
+        "long",
+        "org.apache.hadoop.hbase.io.hfile.CacheConfig",
+        "org.apache.hadoop.hbase.io.Reference",
+        "org.apache.hadoop.hbase.regionserver.StoreFile.Reader");
+
+    addMethod("postMutationBeforeWAL",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType",
+        "org.apache.hadoop.hbase.client.Mutation",
+        "org.apache.hadoop.hbase.Cell",
+        "org.apache.hadoop.hbase.Cell");
+
+    addMethod("postInstantiateDeleteTracker",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.DeleteTracker");
+
+    /* RegionServerObserver */
+
+    addMethod("preMerge",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Region",
+        "org.apache.hadoop.hbase.regionserver.Region");
+
+    addMethod("preStopRegionServer",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postMerge",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Region",
+        "org.apache.hadoop.hbase.regionserver.Region",
+        "org.apache.hadoop.hbase.regionserver.Region");
+
+    addMethod("preMergeCommit",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Region",
+        "org.apache.hadoop.hbase.regionserver.Region",
+        "java.util.List");
+
+    addMethod("postMergeCommit",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Region",
+        "org.apache.hadoop.hbase.regionserver.Region",
+        "org.apache.hadoop.hbase.regionserver.Region");
+
+    addMethod("preRollBackMerge",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Region",
+        "org.apache.hadoop.hbase.regionserver.Region");
+
+    addMethod("postRollBackMerge",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.regionserver.Region",
+        "org.apache.hadoop.hbase.regionserver.Region");
+
+    addMethod("preRollWALWriterRequest",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postRollWALWriterRequest",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext");
+
+    addMethod("postCreateReplicationEndPoint",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.replication.ReplicationEndpoint");
+
+    addMethod("preReplicateLogEntries",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List",
+        "org.apache.hadoop.hbase.CellScanner");
+
+    addMethod("postReplicateLogEntries",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "java.util.List",
+        "org.apache.hadoop.hbase.CellScanner");
+
+    /* WALObserver */
+
+    addMethod("preWALWrite",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo",
+        "org.apache.hadoop.hbase.wal.WALKey",
+        "org.apache.hadoop.hbase.regionserver.wal.WALEdit");
+
+    addMethod("preWALWrite",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo",
+        "org.apache.hadoop.hbase.regionserver.wal.HLogKey",
+        "org.apache.hadoop.hbase.regionserver.wal.WALEdit");
+
+    addMethod("postWALWrite",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo",
+        "org.apache.hadoop.hbase.regionserver.wal.HLogKey",
+        "org.apache.hadoop.hbase.regionserver.wal.WALEdit");
+
+    addMethod("postWALWrite",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.hbase.HRegionInfo",
+        "org.apache.hadoop.hbase.wal.WALKey",
+        "org.apache.hadoop.hbase.regionserver.wal.WALEdit");
+
+    addMethod("preWALRoll",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.fs.Path",
+        "org.apache.hadoop.fs.Path");
+
+    addMethod("postWALRoll",
+        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
+        "org.apache.hadoop.fs.Path",
+        "org.apache.hadoop.fs.Path");
+  }
+}
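
The registrations above encode the branch-1 observer signatures by method name and
fully qualified parameter types. During validation, any declared method of a loaded
coprocessor that matches one of these entries but is absent from the current observer
interfaces gets reported. As a minimal, hypothetical sketch (this class is not part of
the patch), a pre-2.0 observer that still overrides the byte[]-based
postScannerFilterRow would be flagged, since the 2.0 RegionObserver exposes a
Cell-based variant of that hook instead:

    package org.apache.hadoop.hbase.tool.coprocessor; // hypothetical location, illustration only

    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
    import org.apache.hadoop.hbase.regionserver.InternalScanner;

    // Hypothetical branch-1 era observer. Its postScannerFilterRow override matches
    // the byte[]/offset/length signature registered above, so the validator would
    // emit a WARNING for it.
    public class LegacyScanObserver {
      public boolean postScannerFilterRow(ObserverContext<RegionCoprocessorEnvironment> ctx,
          InternalScanner scanner, byte[] currentRow, int offset, short length,
          boolean hasMore) {
        return hasMore;
      }
    }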

http://git-wip-us.apache.org/repos/asf/hbase/blob/c323e7bf/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorMethod.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorMethod.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorMethod.java
new file mode 100644
index 0000000..60e3841
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorMethod.java
@@ -0,0 +1,73 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.tool.coprocessor;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+@InterfaceAudience.Private
+public class CoprocessorMethod {
+  private final String name;
+  private final List<String> parameters;
+
+  public CoprocessorMethod(String name) {
+    this.name = name;
+
+    parameters = new ArrayList<>();
+  }
+
+  public CoprocessorMethod withParameters(String ... parameters) {
+    for (String parameter : parameters) {
+      this.parameters.add(parameter);
+    }
+
+    return this;
+  }
+
+  public CoprocessorMethod withParameters(Class<?> ... parameters) {
+    for (Class<?> parameter : parameters) {
+      this.parameters.add(parameter.getCanonicalName());
+    }
+
+    return this;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (obj == this) {
+      return true;
+    } else if (!(obj instanceof CoprocessorMethod)) {
+      return false;
+    }
+
+    CoprocessorMethod other = (CoprocessorMethod)obj;
+
+    return Objects.equals(name, other.name) &&
+        Objects.equals(parameters, other.parameters);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(name, parameters);
+  }
+}
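
CoprocessorMethod is a small value object: equality and hashing are based on the method
name plus the canonical names of the parameter types, so a key built from strings and a
key built from a reflected java.lang.reflect.Method compare equal. A minimal sketch of
that property (the demo class below is hypothetical and assumed to live in the same
package):

    package org.apache.hadoop.hbase.tool.coprocessor; // hypothetical demo location

    import java.lang.reflect.Method;

    public class CoprocessorMethodEqualityDemo {
      // Arbitrary method whose reflected signature is compared against a string registration.
      public void example(String name, int count) {
      }

      public static void main(String[] args) throws Exception {
        Method reflected = CoprocessorMethodEqualityDemo.class
            .getDeclaredMethod("example", String.class, int.class);

        CoprocessorMethod fromStrings = new CoprocessorMethod("example")
            .withParameters("java.lang.String", "int");
        CoprocessorMethod fromReflection = new CoprocessorMethod(reflected.getName())
            .withParameters(reflected.getParameterTypes());

        // Class.getCanonicalName() yields "java.lang.String" and "int", so both keys match.
        System.out.println(fromStrings.equals(fromReflection)); // expected: true
      }
    }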

http://git-wip-us.apache.org/repos/asf/hbase/blob/c323e7bf/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorMethods.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorMethods.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorMethods.java
new file mode 100644
index 0000000..2e0c801
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorMethods.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.tool.coprocessor;
+
+import java.lang.reflect.Method;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+@InterfaceAudience.Private
+public class CoprocessorMethods {
+  private final Set<CoprocessorMethod> methods;
+
+  public CoprocessorMethods() {
+    methods = new HashSet<>();
+  }
+
+  public void addMethod(String name, String ... parameters) {
+    CoprocessorMethod cpMethod = new CoprocessorMethod(name).withParameters(parameters);
+    methods.add(cpMethod);
+  }
+
+  public void addMethod(String name, Class<?> ... parameters) {
+    CoprocessorMethod cpMethod = new CoprocessorMethod(name).withParameters(parameters);
+    methods.add(cpMethod);
+  }
+
+  public void addMethod(Method method) {
+    CoprocessorMethod cpMethod = new CoprocessorMethod(method.getName())
+        .withParameters(method.getParameterTypes());
+    methods.add(cpMethod);
+  }
+
+  public boolean hasMethod(String name, String ... parameters) {
+    CoprocessorMethod method = new CoprocessorMethod(name).withParameters(parameters);
+    return methods.contains(method);
+  }
+
+  public boolean hasMethod(String name, Class<?> ... parameters) {
+    CoprocessorMethod method = new CoprocessorMethod(name).withParameters(parameters);
+    return methods.contains(method);
+  }
+
+  public boolean hasMethod(Method method) {
+    CoprocessorMethod cpMethod = new CoprocessorMethod(method.getName())
+        .withParameters(method.getParameterTypes());
+    return methods.contains(cpMethod);
+  }
+}
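
CoprocessorMethods is a set of those keys with symmetric add and lookup overloads for
strings, Class objects, and reflected methods. A small hypothetical sketch, assuming the
classes in this patch are on the classpath, showing that a signature registered by type
name can be found again via Class arguments:

    package org.apache.hadoop.hbase.tool.coprocessor; // hypothetical demo location

    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.regionserver.InternalScanner;

    public class MethodsLookupSketch {
      public static void main(String[] args) {
        CoprocessorMethods methods = new CoprocessorMethods();

        // Register a signature by fully qualified type names, as Branch1CoprocessorMethods does.
        methods.addMethod("preScannerClose",
            "org.apache.hadoop.hbase.coprocessor.ObserverContext",
            "org.apache.hadoop.hbase.regionserver.InternalScanner");

        // Look it up with Class<?> arguments; canonical names make both forms interchangeable.
        boolean known = methods.hasMethod("preScannerClose",
            ObserverContext.class, InternalScanner.class);
        System.out.println(known); // expected: true
      }
    }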

http://git-wip-us.apache.org/repos/asf/hbase/blob/c323e7bf/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidator.java
new file mode 100644
index 0000000..c6d5723
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidator.java
@@ -0,0 +1,247 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.tool.coprocessor;
+
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+import java.util.stream.Collectors;
+
+import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.tool.PreUpgradeValidator;
+import org.apache.hadoop.hbase.tool.coprocessor.CoprocessorViolation.Severity;
+import org.apache.hadoop.hbase.util.AbstractHBaseTool;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
+import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
+
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
+public class CoprocessorValidator extends AbstractHBaseTool {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(CoprocessorValidator.class);
+
+  private CoprocessorMethods branch1;
+  private CoprocessorMethods current;
+
+  private boolean dieOnWarnings;
+  private boolean scan;
+  private List<String> args;
+
+  public CoprocessorValidator() {
+    branch1 = new Branch1CoprocessorMethods();
+    current = new CurrentCoprocessorMethods();
+  }
+
+  /**
+   * This classloader implementation calls the {@link #resolveClass(Class)}
+   * method for every loaded class, which means some extra validation will
+   * take place <a
+   * href="https://docs.oracle.com/javase/specs/jls/se8/html/jls-12.html#jls-12.3">
+   * according to the JLS</a>.
+   */
+  private static final class ResolverUrlClassLoader extends URLClassLoader {
+    private ResolverUrlClassLoader(URL[] urls) {
+      super(urls, ResolverUrlClassLoader.class.getClassLoader());
+    }
+
+    @Override
+    public Class<?> loadClass(String name) throws ClassNotFoundException {
+      return loadClass(name, true);
+    }
+  }
+
+  private ResolverUrlClassLoader createClassLoader(URL[] urls) {
+    return AccessController.doPrivileged(new PrivilegedAction<ResolverUrlClassLoader>() {
+      @Override
+      public ResolverUrlClassLoader run() {
+        return new ResolverUrlClassLoader(urls);
+      }
+    });
+  }
+
+  private void validate(ClassLoader classLoader, String className,
+      List<CoprocessorViolation> violations) {
+    LOG.debug("Validating class '{}'.", className);
+
+    try {
+      Class<?> clazz = classLoader.loadClass(className);
+
+      for (Method method : clazz.getDeclaredMethods()) {
+        LOG.trace("Validating method '{}'.", method);
+
+        if (branch1.hasMethod(method) && !current.hasMethod(method)) {
+          CoprocessorViolation violation = new CoprocessorViolation(Severity.WARNING,
+              "Method '" + method + "' was removed from new coprocessor API, "
+                  + "so it won't be called by HBase.");
+          violations.add(violation);
+        }
+      }
+    } catch (ClassNotFoundException e) {
+      CoprocessorViolation violation = new CoprocessorViolation(Severity.ERROR,
+          "No such class '" + className + "'.", e);
+      violations.add(violation);
+    } catch (RuntimeException | Error e) {
+      CoprocessorViolation violation = new CoprocessorViolation(Severity.ERROR,
+          "Could not validate class '" + className + "'.", e);
+      violations.add(violation);
+    }
+  }
+
+  public List<CoprocessorViolation> validate(ClassLoader classLoader, List<String> classNames) {
+    List<CoprocessorViolation> violations = new ArrayList<>();
+
+    for (String className : classNames) {
+      validate(classLoader, className, violations);
+    }
+
+    return violations;
+  }
+
+  public List<CoprocessorViolation> validate(List<URL> urls, List<String> classNames)
+      throws IOException {
+    URL[] urlArray = new URL[urls.size()];
+    urls.toArray(urlArray);
+
+    try (ResolverUrlClassLoader classLoader = createClassLoader(urlArray)) {
+      return validate(classLoader, classNames);
+    }
+  }
+
+  @VisibleForTesting
+  protected List<String> getJarClasses(Path path) throws IOException {
+    try (JarFile jarFile = new JarFile(path.toFile())) {
+      return jarFile.stream()
+          .map(JarEntry::getName)
+          .filter((name) -> name.endsWith(".class"))
+          .map((name) -> name.substring(0, name.length() - 6).replace('/', '.'))
+          .collect(Collectors.toList());
+    }
+  }
+
+  @VisibleForTesting
+  protected List<String> filterObservers(ClassLoader classLoader,
+      Iterable<String> classNames) throws ClassNotFoundException {
+    List<String> filteredClassNames = new ArrayList<>();
+
+    for (String className : classNames) {
+      LOG.debug("Scanning class '{}'.", className);
+
+      Class<?> clazz = classLoader.loadClass(className);
+
+      if (Coprocessor.class.isAssignableFrom(clazz)) {
+        LOG.debug("Found coprocessor class '{}'.", className);
+        filteredClassNames.add(className);
+      }
+    }
+
+    return filteredClassNames;
+  }
+
+  @Override
+  protected void printUsage() {
+    String header = "hbase " + PreUpgradeValidator.TOOL_NAME + " " +
+        PreUpgradeValidator.VALIDATE_CP_NAME + " <jar> -scan|<classes>";
+    printUsage(header, "Options:", "");
+  }
+
+  @Override
+  protected void addOptions() {
+    addOptNoArg("e", "Treat warnings as errors.");
+    addOptNoArg("scan", "Scan jar for observers.");
+  }
+
+  @Override
+  protected void processOptions(CommandLine cmd) {
+    scan = cmd.hasOption("scan");
+    dieOnWarnings = cmd.hasOption("e");
+    args = cmd.getArgList();
+  }
+
+  @Override
+  protected int doWork() throws Exception {
+    if (args.size() < 1) {
+      System.err.println("Missing jar file.");
+      printUsage();
+      return EXIT_FAILURE;
+    }
+
+    String jar = args.get(0);
+
+    if (args.size() == 1 && !scan) {
+      throw new ParseException("Missing classes or -scan option.");
+    } else if (args.size() > 1 && scan) {
+      throw new ParseException("Can't use classes with -scan option.");
+    }
+
+    Path jarPath = Paths.get(jar);
+    URL[] urls = new URL[] { jarPath.toUri().toURL() };
+
+    List<CoprocessorViolation> violations;
+
+    try (ResolverUrlClassLoader classLoader = createClassLoader(urls)) {
+      List<String> classNames;
+
+      if (scan) {
+        List<String> jarClassNames = getJarClasses(jarPath);
+        classNames = filterObservers(classLoader, jarClassNames);
+      } else {
+        classNames = args.subList(1, args.size());
+      }
+
+      violations = validate(classLoader, classNames);
+    }
+
+    boolean error = false;
+
+    for (CoprocessorViolation violation : violations) {
+      switch (violation.getSeverity()) {
+        case WARNING:
+          System.err.println("[WARNING] " + violation.getMessage());
+
+          if (dieOnWarnings) {
+            error = true;
+          }
+
+          break;
+        case ERROR:
+          System.err.println("[ERROR] " + violation.getMessage());
+          error = true;
+
+          break;
+      }
+    }
+
+    return (error) ? EXIT_FAILURE : EXIT_SUCCESS;
+  }
+}
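
Besides the command-line entry point wired in through PreUpgradeValidator, the validator
can be driven programmatically via its public validate overloads. A rough usage sketch,
assuming a hypothetical jar path and observer class name:

    package org.apache.hadoop.hbase.tool.coprocessor; // hypothetical demo location

    import java.net.URL;
    import java.nio.file.Paths;
    import java.util.List;

    import org.apache.hbase.thirdparty.com.google.common.collect.Lists;

    public class ValidatorUsageSketch {
      public static void main(String[] args) throws Exception {
        CoprocessorValidator validator = new CoprocessorValidator();

        // Hypothetical jar and observer class; substitute a real coprocessor jar and class names.
        URL jar = Paths.get("/tmp/my-coprocessor.jar").toUri().toURL();
        List<String> classNames = Lists.newArrayList("com.example.MyRegionObserver");

        // Loads the named classes from the jar with the resolving classloader and reports
        // every declared method that exists in the branch-1 API but not in the current one.
        List<CoprocessorViolation> violations =
            validator.validate(Lists.newArrayList(jar), classNames);

        for (CoprocessorViolation violation : violations) {
          System.out.println(violation.getSeverity() + ": " + violation.getMessage());
        }
      }
    }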

http://git-wip-us.apache.org/repos/asf/hbase/blob/c323e7bf/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorViolation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorViolation.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorViolation.java
new file mode 100644
index 0000000..c403c07
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorViolation.java
@@ -0,0 +1,56 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.tool.coprocessor;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
+
+@InterfaceAudience.Private
+public class CoprocessorViolation {
+  public enum Severity {
+    WARNING, ERROR
+  }
+
+  private final Severity severity;
+  private final String message;
+
+  public CoprocessorViolation(Severity severity, String message) {
+    this(severity, message, null);
+  }
+
+  public CoprocessorViolation(Severity severity, String message, Throwable t) {
+    this.severity = severity;
+
+    if (t == null) {
+      this.message = message;
+    } else {
+      this.message = message + "\n" + Throwables.getStackTraceAsString(t);
+    }
+  }
+
+  public Severity getSeverity() {
+    return severity;
+  }
+
+  public String getMessage() {
+    return message;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/c323e7bf/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CurrentCoprocessorMethods.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CurrentCoprocessorMethods.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CurrentCoprocessorMethods.java
new file mode 100644
index 0000000..265cf51
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/coprocessor/CurrentCoprocessorMethods.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.tool.coprocessor;
+
+import java.lang.reflect.Method;
+
+import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
+import org.apache.hadoop.hbase.coprocessor.EndpointObserver;
+import org.apache.hadoop.hbase.coprocessor.MasterObserver;
+import org.apache.hadoop.hbase.coprocessor.RegionObserver;
+import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
+import org.apache.hadoop.hbase.coprocessor.WALObserver;
+import org.apache.yetus.audience.InterfaceAudience;
+
+@InterfaceAudience.Private
+public class CurrentCoprocessorMethods extends CoprocessorMethods {
+  public CurrentCoprocessorMethods() {
+    addMethods(BulkLoadObserver.class);
+    addMethods(EndpointObserver.class);
+    addMethods(MasterObserver.class);
+    addMethods(RegionObserver.class);
+    addMethods(RegionServerObserver.class);
+    addMethods(WALObserver.class);
+  }
+
+  private void addMethods(Class<?> clazz) {
+    for (Method method : clazz.getDeclaredMethods()) {
+      addMethod(method);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/c323e7bf/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java
new file mode 100644
index 0000000..8926ff5
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.tool.coprocessor;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.tool.coprocessor.CoprocessorViolation.Severity;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+
+@Category({ SmallTests.class })
+@SuppressWarnings("deprecation")
+public class CoprocessorValidatorTest {
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+      HBaseClassTestRule.forClass(CoprocessorValidatorTest.class);
+
+  private CoprocessorValidator validator;
+
+  public CoprocessorValidatorTest() {
+    validator = new CoprocessorValidator();
+  }
+
+  private static ClassLoader getClassLoader() {
+    return CoprocessorValidatorTest.class.getClassLoader();
+  }
+
+  private static String getFullClassName(String className) {
+    return CoprocessorValidatorTest.class.getName() + "$" + className;
+  }
+
+  @SuppressWarnings({"rawtypes", "unused"})
+  private static class TestObserver implements Coprocessor {
+    @Override
+    public void start(CoprocessorEnvironment env) throws IOException {
+    }
+
+    @Override
+    public void stop(CoprocessorEnvironment env) throws IOException {
+    }
+  }
+
+  @Test
+  public void testFilterObservers() throws Exception {
+    String filterObservers = getFullClassName("TestObserver");
+    List<String> classNames = Lists.newArrayList(
+        filterObservers, getClass().getName());
+    List<String> filteredClassNames = validator.filterObservers(getClassLoader(), classNames);
+
+    assertEquals(1, filteredClassNames.size());
+    assertEquals(filterObservers, filteredClassNames.get(0));
+  }
+
+  private List<CoprocessorViolation> validate(String className) {
+    ClassLoader classLoader = getClass().getClassLoader();
+    return validate(classLoader, className);
+  }
+
+  private List<CoprocessorViolation> validate(ClassLoader classLoader, String className) {
+    List<String> classNames = Lists.newArrayList(getClass().getName() + "$" + className);
+    return validator.validate(classLoader, classNames);
+  }
+
+  /*
+   * In this test case, we try to load a nonexistent class.
+   */
+  @Test
+  public void testNoSuchClass() throws IOException {
+    List<CoprocessorViolation> violations = validate("NoSuchClass");
+    assertEquals(1, violations.size());
+
+    CoprocessorViolation violation = violations.get(0);
+    assertEquals(Severity.ERROR, violation.getSeverity());
+    assertTrue(violation.getMessage().contains(
+        "java.lang.ClassNotFoundException: " +
+        "org.apache.hadoop.hbase.tool.coprocessor.CoprocessorValidatorTest$NoSuchClass"));
+  }
+
+  /*
+   * In this test case, we validate the MissingClass coprocessor, which
+   * references a missing class. With a special classloader, we prevent that
+   * class from being loaded at runtime. This simulates cases where a class
+   * is no longer on the classpath.
+   * E.g. org.apache.hadoop.hbase.regionserver.wal.WALEdit was moved to
+   * org.apache.hadoop.hbase.wal, so class loading will fail on 2.0.
+   */
+  private static class MissingClass {
+  }
+
+  @SuppressWarnings("unused")
+  private static class MissingClassObserver {
+    public void method(MissingClass missingClass) {
+    }
+  }
+
+  private static class MissingClassClassLoader extends ClassLoader {
+    public MissingClassClassLoader() {
+      super(getClassLoader());
+    }
+
+    @Override
+    public Class<?> loadClass(String name) throws ClassNotFoundException {
+      if (name.equals(getFullClassName("MissingClass"))) {
+        throw new ClassNotFoundException(name);
+      }
+
+      return super.findClass(name);
+    }
+  }
+
+  @Test
+  public void testMissingClass() throws IOException {
+    MissingClassClassLoader missingClassClassLoader = new MissingClassClassLoader();
+    List<CoprocessorViolation> violations = validate(missingClassClassLoader,
+        "MissingClassObserver");
+    assertEquals(1, violations.size());
+
+    CoprocessorViolation violation = violations.get(0);
+    assertEquals(Severity.ERROR, violation.getSeverity());
+    assertTrue(violation.getMessage().contains(
+        "java.lang.ClassNotFoundException: " +
+        "org.apache.hadoop.hbase.tool.coprocessor.CoprocessorValidatorTest$MissingClass"));
+  }
+
+  /*
+   * The ObsoleteMethod coprocessor implements a preCreateTable method that takes
+   * HRegionInfo parameters. The current API passes RegionInfo parameters
+   * instead, so this method won't be called by HBase at all.
+   */
+  @SuppressWarnings("unused")
+  private static class ObsoleteMethodObserver /* implements MasterObserver */ {
+    public void preCreateTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
+        HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
+    }
+  }
+
+  @Test
+  public void testObsoleteMethod() throws IOException {
+    List<CoprocessorViolation> violations = validate("ObsoleteMethodObserver");
+    assertEquals(1, violations.size());
+
+    CoprocessorViolation violation = violations.get(0);
+    assertEquals(Severity.WARNING, violation.getSeverity());
+    assertTrue(violation.getMessage().contains("was removed from new coprocessor API"));
+  }
+}
