HBASE-16338 Remove Jackson1 deps

* Change imports from org.codehaus to com.fasterxml
* Exclude transitive jackson1 from hadoop and others
* Minor test cleanup to add assert messages, fix some parameter order
* Add anti-pattern check for using jackson 1 imports
* Add explicit non-null serialization directive to ScannerModel


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5facaded
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5facaded
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5facaded

Branch: refs/heads/HBASE-18410
Commit: 5facaded902a13556952b1f9d26b768cb86e6599
Parents: a43a00e
Author: Mike Drob <md...@apache.org>
Authored: Mon Oct 2 16:31:48 2017 -0500
Committer: Mike Drob <md...@apache.org>
Committed: Fri Oct 20 09:20:12 2017 -0500

----------------------------------------------------------------------
 dev-support/hbase-personality.sh                |   6 ++
 hbase-client/pom.xml                            |   8 +-
 .../apache/hadoop/hbase/util/JsonMapper.java    |   2 +-
 .../hadoop/hbase/client/TestOperation.java      |   2 +-
 hbase-it/pom.xml                                |   4 +
 .../hadoop/hbase/RESTApiClusterManager.java     |  18 ++--
 hbase-mapreduce/pom.xml                         |  12 +--
 .../hadoop/hbase/PerformanceEvaluation.java     |  10 +-
 .../hadoop/hbase/TestPerformanceEvaluation.java |   6 +-
 .../src/main/resources/supplemental-models.xml  |  13 ---
 hbase-rest/pom.xml                              |  21 ++--
 .../hbase/rest/ProtobufStreamingOutput.java     | 105 ++++++++++++++++++
 .../hbase/rest/ProtobufStreamingUtil.java       | 106 -------------------
 .../apache/hadoop/hbase/rest/RESTServer.java    |   4 +-
 .../hadoop/hbase/rest/TableScanResource.java    |  26 ++---
 .../hadoop/hbase/rest/model/CellModel.java      |   2 +-
 .../hbase/rest/model/ColumnSchemaModel.java     |   5 +-
 .../hbase/rest/model/NamespacesModel.java       |   3 +-
 .../hadoop/hbase/rest/model/RowModel.java       |   2 +-
 .../hadoop/hbase/rest/model/ScannerModel.java   |   6 +-
 .../rest/model/StorageClusterStatusModel.java   |   6 ++
 .../rest/model/StorageClusterVersionModel.java  |   3 -
 .../hbase/rest/model/TableSchemaModel.java      |   7 +-
 .../hbase/rest/HBaseRESTTestingUtility.java     |   5 +-
 .../hadoop/hbase/rest/RowResourceBase.java      |   4 +-
 .../apache/hadoop/hbase/rest/TestDeleteRow.java |   2 +-
 .../hadoop/hbase/rest/TestMultiRowResource.java |   9 +-
 .../rest/TestNamespacesInstanceResource.java    |   9 +-
 .../hadoop/hbase/rest/TestSchemaResource.java   |  52 ++++++---
 .../apache/hadoop/hbase/rest/TestTableScan.java |  60 +++--------
 .../hadoop/hbase/rest/TestVersionResource.java  |  21 ++--
 .../hbase/rest/model/TestColumnSchemaModel.java |  16 +--
 .../hadoop/hbase/rest/model/TestModelBase.java  |   6 +-
 .../hbase/rest/model/TestTableSchemaModel.java  |   3 +
 hbase-server/pom.xml                            |  16 +--
 .../hadoop/hbase/io/hfile/AgeSnapshot.java      |   2 +-
 .../hadoop/hbase/io/hfile/BlockCacheUtil.java   |  17 ++-
 .../hadoop/hbase/io/hfile/LruBlockCache.java    |   5 +-
 .../hbase/io/hfile/bucket/BucketAllocator.java  |   2 +-
 .../org/apache/hadoop/hbase/ipc/RpcServer.java  |   2 +-
 .../hbase/monitoring/MonitoredTaskImpl.java     |   2 +-
 .../org/apache/hadoop/hbase/util/JSONBean.java  |   6 +-
 .../hadoop/hbase/util/JSONMetricUtil.java       |  10 +-
 .../hadoop/hbase/wal/WALPrettyPrinter.java      |   2 +-
 .../hbase-webapps/master/processMaster.jsp      |   2 +-
 .../hbase-webapps/master/processRS.jsp          |   2 +-
 .../hbase-webapps/regionserver/processRS.jsp    |   2 +-
 .../hbase/io/hfile/TestBlockCacheReporting.java |   4 +-
 .../hadoop/hbase/util/TestJSONMetricUtil.java   |  33 +++---
 hbase-shaded/hbase-shaded-mapreduce/pom.xml     |   4 -
 hbase-shaded/pom.xml                            |   4 +
 hbase-shell/src/main/ruby/hbase/taskmonitor.rb  |   2 +-
 hbase-spark/pom.xml                             |  20 ++++
 pom.xml                                         |  97 ++++++++++++-----
 54 files changed, 417 insertions(+), 381 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/dev-support/hbase-personality.sh
----------------------------------------------------------------------
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 9b23e11..27c2169 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -428,6 +428,12 @@ function hbaseanti_patchfile
     ((result=result+1))
   fi
 
+  warnings=$(${GREP} -c 'import org.codehaus.jackson' "${patchfile}")
+  if [[ ${warnings} -gt 0 ]]; then
+    add_vote_table -1 hbaseanti "" "The patch appears to use Jackson 1 
classes/annotations: ${warnings}."
+    ((result=result+1))
+  fi
+
   if [[ ${result} -gt 0 ]]; then
     return 1
   fi

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-client/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml
index d9aa006..a8e73c7 100644
--- a/hbase-client/pom.xml
+++ b/hbase-client/pom.xml
@@ -169,10 +169,6 @@
       <artifactId>htrace-core</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-mapper-asl</artifactId>
-    </dependency>
-    <dependency>
       <groupId>org.jruby.jcodings</groupId>
       <artifactId>jcodings</artifactId>
     </dependency>
@@ -218,6 +214,10 @@
         </exclusion>
       </exclusions>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
   </dependencies>
 
   <profiles>

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java
----------------------------------------------------------------------
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java
index 53cd264..b5d31ff 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.util;
 import java.io.IOException;
 import java.util.Map;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.yetus.audience.InterfaceAudience;
-import org.codehaus.jackson.map.ObjectMapper;
 
 /**
  * Utility class for converting objects to JSON

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
----------------------------------------------------------------------
diff --git 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
index 800de6d..61c9f3f 100644
--- 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
+++ 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
@@ -62,10 +62,10 @@ import 
org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.BuilderStyleTest;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 /**
  * Run tests that use the functionality of the Operation superclass for

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-it/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index 111c4cf..42c8da7 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -294,6 +294,10 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
   </dependencies>
 
   <profiles>

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
----------------------------------------------------------------------
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
index 055b58a..8f69d33 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
@@ -18,13 +18,13 @@
 
 package org.apache.hadoop.hbase;
 
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;
 import javax.ws.rs.client.Client;
 import javax.ws.rs.client.ClientBuilder;
@@ -222,8 +222,8 @@ public class RESTApiClusterManager extends Configured 
implements ClusterManager
     if (hosts != null) {
       // Iterate through the list of hosts, stopping once you've reached the 
requested hostname.
       for (JsonNode host : hosts) {
-        if (host.get("hostname").getTextValue().equals(hostname)) {
-          hostId = host.get("hostId").getTextValue();
+        if (host.get("hostname").textValue().equals(hostname)) {
+          hostId = host.get("hostId").textValue();
           break;
         }
       }
@@ -272,12 +272,12 @@ public class RESTApiClusterManager extends Configured 
implements ClusterManager
     if (roles != null) {
       // Iterate through the list of roles, stopping once the requested one is 
found.
       for (JsonNode role : roles) {
-        if (role.get("hostRef").get("hostId").getTextValue().equals(hostId) &&
+        if (role.get("hostRef").get("hostId").textValue().equals(hostId) &&
             role.get("type")
-                .getTextValue()
+                .textValue()
                 .toLowerCase(Locale.ROOT)
                 .equals(roleType.toLowerCase(Locale.ROOT))) {
-          roleValue = role.get(property).getTextValue();
+          roleValue = role.get(property).textValue();
           break;
         }
       }
@@ -306,8 +306,8 @@ public class RESTApiClusterManager extends Configured 
implements ClusterManager
     if (services != null) {
       // Iterate through the list of services, stopping once the requested one 
is found.
       for (JsonNode serviceEntry : services) {
-        if 
(serviceEntry.get("type").getTextValue().equals(service.toString())) {
-          serviceName = serviceEntry.get("name").getTextValue();
+        if (serviceEntry.get("type").textValue().equals(service.toString())) {
+          serviceName = serviceEntry.get("name").textValue();
           break;
         }
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-mapreduce/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml
index 8073173..4a63f41 100644
--- a/hbase-mapreduce/pom.xml
+++ b/hbase-mapreduce/pom.xml
@@ -286,14 +286,6 @@
       <artifactId>zookeeper</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-core-asl</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-mapper-asl</artifactId>
-    </dependency>
-    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>
@@ -305,6 +297,10 @@
       <version>${netty.hadoop.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
   </dependencies>
   <profiles>
     <!-- Skip the tests in this module -->

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
----------------------------------------------------------------------
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 05e984e..2bf94f4 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -18,8 +18,6 @@
  */
 package org.apache.hadoop.hbase;
 
-import static 
org.codehaus.jackson.map.SerializationConfig.Feature.SORT_PROPERTIES_ALPHABETICALLY;
-
 import java.io.IOException;
 import java.io.PrintStream;
 import java.lang.reflect.Constructor;
@@ -71,8 +69,6 @@ import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.filter.CompareFilter;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterAllFilter;
 import org.apache.hadoop.hbase.filter.FilterList;
@@ -85,7 +81,6 @@ import org.apache.hadoop.hbase.io.hfile.RandomDistribution;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
-import org.apache.hadoop.hbase.regionserver.TestHRegionFileSystem;
 import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
 import org.apache.hadoop.hbase.trace.SpanReceiverHost;
 import org.apache.hadoop.hbase.util.*;
@@ -98,7 +93,6 @@ import 
org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
 import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.apache.htrace.Sampler;
 import org.apache.htrace.Trace;
 import org.apache.htrace.TraceScope;
@@ -108,6 +102,8 @@ import 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
 
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.UniformReservoir;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.MapperFeature;
 
 /**
  * Script used evaluating HBase performance and scalability.  Runs a HBase
@@ -133,7 +129,7 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   private static final Log LOG = 
LogFactory.getLog(PerformanceEvaluation.class.getName());
   private static final ObjectMapper MAPPER = new ObjectMapper();
   static {
-    MAPPER.configure(SORT_PROPERTIES_ALPHABETICALLY, true);
+    MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
   }
 
   public static final String TABLE_NAME = "TestTable";

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
----------------------------------------------------------------------
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
index 86a3d3f..106b7e9 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
@@ -37,9 +37,6 @@ import 
org.apache.hadoop.hbase.PerformanceEvaluation.RandomReadTest;
 import org.apache.hadoop.hbase.PerformanceEvaluation.TestOptions;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.codehaus.jackson.JsonGenerationException;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -47,6 +44,9 @@ import org.junit.experimental.categories.Category;
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.Snapshot;
 import com.codahale.metrics.UniformReservoir;
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 @Category({MiscTests.class, SmallTests.class})
 public class TestPerformanceEvaluation {

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-resource-bundle/src/main/resources/supplemental-models.xml
----------------------------------------------------------------------
diff --git a/hbase-resource-bundle/src/main/resources/supplemental-models.xml 
b/hbase-resource-bundle/src/main/resources/supplemental-models.xml
index 68e2d5f..4acf2fd 100644
--- a/hbase-resource-bundle/src/main/resources/supplemental-models.xml
+++ b/hbase-resource-bundle/src/main/resources/supplemental-models.xml
@@ -514,19 +514,6 @@ under the License.
   </supplement>
   <supplement>
     <project>
-      <groupId>org.glassfish.jersey.media</groupId>
-      <artifactId>jersey-media-json-jackson1</artifactId>
-      <licenses>
-        <license>
-          <name>CDDL 1.1</name>
-          <url>https://glassfish.java.net/public/CDDL+GPL_1_1.html</url>
-          <distribution>repo</distribution>
-        </license>
-      </licenses>
-    </project>
-  </supplement>
-  <supplement>
-    <project>
       <groupId>org.glassfish.web</groupId>
       <artifactId>javax.servlet.jsp</artifactId>
       <licenses>

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index 0a12573..2d5d701 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -206,6 +206,15 @@
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>${compat.module}</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <!--Below MR wanted by PE-->
     <dependency>
       <groupId>org.apache.hbase</groupId>
@@ -300,8 +309,8 @@
       <artifactId>jersey-container-servlet-core</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.glassfish.jersey.media</groupId>
-      <artifactId>jersey-media-json-jackson1</artifactId>
+      <groupId>com.fasterxml.jackson.jaxrs</groupId>
+      <artifactId>jackson-jaxrs-json-provider</artifactId>
     </dependency>
     <dependency>
       <!--For JspC used in ant task-->
@@ -321,14 +330,6 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-core-asl</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-mapper-asl</artifactId>
-    </dependency>
-    <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java
 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java
new file mode 100644
index 0000000..b067044
--- /dev/null
+++ 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.rest;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.List;
+
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.StreamingOutput;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.rest.model.CellModel;
+import org.apache.hadoop.hbase.rest.model.CellSetModel;
+import org.apache.hadoop.hbase.rest.model.RowModel;
+import org.apache.hadoop.hbase.util.Bytes;
+
+public class ProtobufStreamingOutput implements StreamingOutput {
+  private static final Log LOG = 
LogFactory.getLog(ProtobufStreamingOutput.class);
+
+  private String contentType;
+  private ResultScanner resultScanner;
+  private int limit;
+  private int fetchSize;
+
+  protected ProtobufStreamingOutput(ResultScanner scanner, String type, int 
limit, int fetchSize) {
+    this.resultScanner = scanner;
+    this.contentType = type;
+    this.limit = limit;
+    this.fetchSize = fetchSize;
+    if (LOG.isTraceEnabled()) {
+      LOG.trace("Created StreamingOutput with content type = " + 
this.contentType
+          + " user limit : " + this.limit + " scan fetch size : " + 
this.fetchSize);
+    }
+  }
+
+  @Override
+  public void write(OutputStream outStream) throws IOException, 
WebApplicationException {
+    Result[] rowsToSend;
+    if(limit < fetchSize){
+      rowsToSend = this.resultScanner.next(limit);
+      writeToStream(createModelFromResults(rowsToSend), this.contentType, 
outStream);
+    } else {
+      int count = limit;
+      while (count > 0) {
+        if (count < fetchSize) {
+          rowsToSend = this.resultScanner.next(count);
+        } else {
+          rowsToSend = this.resultScanner.next(this.fetchSize);
+        }
+        if(rowsToSend.length == 0){
+          break;
+        }
+        count = count - rowsToSend.length;
+        writeToStream(createModelFromResults(rowsToSend), this.contentType, 
outStream);
+      }
+    }
+  }
+
+  private void writeToStream(CellSetModel model, String contentType, 
OutputStream outStream)
+      throws IOException {
+    byte[] objectBytes = model.createProtobufOutput();
+    outStream.write(Bytes.toBytes((short)objectBytes.length));
+    outStream.write(objectBytes);
+    outStream.flush();
+    if (LOG.isTraceEnabled()) {
+      LOG.trace("Wrote " + model.getRows().size() + " rows to stream 
successfully.");
+    }
+  }
+
+  private CellSetModel createModelFromResults(Result[] results) {
+    CellSetModel cellSetModel = new CellSetModel();
+    for (Result rs : results) {
+      byte[] rowKey = rs.getRow();
+      RowModel rModel = new RowModel(rowKey);
+      List<Cell> kvs = rs.listCells();
+      for (Cell kv : kvs) {
+        rModel.addCell(new CellModel(CellUtil.cloneFamily(kv), 
CellUtil.cloneQualifier(kv), kv
+            .getTimestamp(), CellUtil.cloneValue(kv)));
+      }
+      cellSetModel.addRow(rModel);
+    }
+    return cellSetModel;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
deleted file mode 100644
index cb0f4c8..0000000
--- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.rest;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.List;
-
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.StreamingOutput;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.rest.model.CellModel;
-import org.apache.hadoop.hbase.rest.model.CellSetModel;
-import org.apache.hadoop.hbase.rest.model.RowModel;
-import org.apache.hadoop.hbase.util.Bytes;
-
-
-public class ProtobufStreamingUtil implements StreamingOutput {
-
-  private static final Log LOG = 
LogFactory.getLog(ProtobufStreamingUtil.class);
-  private String contentType;
-  private ResultScanner resultScanner;
-  private int limit;
-  private int fetchSize;
-
-  protected ProtobufStreamingUtil(ResultScanner scanner, String type, int 
limit, int fetchSize) {
-    this.resultScanner = scanner;
-    this.contentType = type;
-    this.limit = limit;
-    this.fetchSize = fetchSize;
-    if (LOG.isTraceEnabled()) {
-      LOG.trace("Created ScanStreamingUtil with content type = " + 
this.contentType
-        + " user limit : " + this.limit + " scan fetch size : " + 
this.fetchSize);
-    }
-  }
-
-  @Override
-  public void write(OutputStream outStream) throws IOException, 
WebApplicationException {
-    Result[] rowsToSend;
-    if(limit < fetchSize){
-      rowsToSend = this.resultScanner.next(limit);
-      writeToStream(createModelFromResults(rowsToSend), this.contentType, 
outStream);
-    } else {
-      int count = limit;
-      while (count > 0) {
-        if (count < fetchSize) {
-          rowsToSend = this.resultScanner.next(count);
-        } else {
-          rowsToSend = this.resultScanner.next(this.fetchSize);
-        }
-        if(rowsToSend.length == 0){
-          break;
-        }
-        count = count - rowsToSend.length;
-        writeToStream(createModelFromResults(rowsToSend), this.contentType, 
outStream);
-      }
-    }
-  }
-
-  private void writeToStream(CellSetModel model, String contentType, 
OutputStream outStream)
-      throws IOException {
-    byte[] objectBytes = model.createProtobufOutput();
-    outStream.write(Bytes.toBytes((short)objectBytes.length));
-    outStream.write(objectBytes);
-    outStream.flush();
-    if (LOG.isTraceEnabled()) {
-      LOG.trace("Wrote " + model.getRows().size() + " rows to stream 
successfully.");
-    }
-  }
-
-  private CellSetModel createModelFromResults(Result[] results) {
-    CellSetModel cellSetModel = new CellSetModel();
-    for (Result rs : results) {
-      byte[] rowKey = rs.getRow();
-      RowModel rModel = new RowModel(rowKey);
-      List<Cell> kvs = rs.listCells();
-      for (Cell kv : kvs) {
-        rModel.addCell(new CellModel(CellUtil.cloneFamily(kv), 
CellUtil.cloneQualifier(kv), kv
-            .getTimestamp(), CellUtil.cloneValue(kv)));
-      }
-      cellSetModel.addRow(rModel);
-    }
-    return cellSetModel;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index 907a220..c2f6cf6 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -27,6 +27,7 @@ import java.util.Set;
 import java.util.EnumSet;
 import java.util.concurrent.ArrayBlockingQueue;
 
+import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
@@ -67,7 +68,6 @@ import org.eclipse.jetty.util.thread.QueuedThreadPool;
 import org.eclipse.jetty.jmx.MBeanContainer;
 import org.eclipse.jetty.servlet.FilterHolder;
 
-import org.glassfish.jersey.jackson1.Jackson1Feature;
 import org.glassfish.jersey.server.ResourceConfig;
 import org.glassfish.jersey.servlet.ServletContainer;
 
@@ -238,7 +238,7 @@ public class RESTServer implements Constants {
 
     // set up the Jersey servlet container for Jetty
     ResourceConfig application = new ResourceConfig().
-        
packages("org.apache.hadoop.hbase.rest").register(Jackson1Feature.class);
+        
packages("org.apache.hadoop.hbase.rest").register(JacksonJaxbJsonProvider.class);
     ServletHolder sh = new ServletHolder(new ServletContainer(application));
 
     // Set the default max thread number to 100 to limit

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java
index aeb213b..f8b9593 100644
--- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java
+++ 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java
@@ -23,15 +23,13 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
 import javax.ws.rs.HeaderParam;
-import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.Response;
 import javax.ws.rs.core.Response.ResponseBuilder;
+import javax.ws.rs.core.StreamingOutput;
 import javax.ws.rs.core.UriInfo;
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
@@ -47,13 +45,14 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.rest.model.CellModel;
 import org.apache.hadoop.hbase.rest.model.RowModel;
-import org.codehaus.jackson.annotate.JsonIgnore;
-import org.codehaus.jackson.annotate.JsonProperty;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
 
 @InterfaceAudience.Private
 public class TableScanResource  extends ResourceBase {
-
   private static final Log LOG = LogFactory.getLog(TableScanResource.class);
+
   TableResource tableResource;
   ResultScanner results;
   int userRequestedLimit;
@@ -75,23 +74,14 @@ public class TableScanResource  extends ResourceBase {
     servlet.getMetrics().incrementSucessfulScanRequests(1);
     final Iterator<Result> itr = results.iterator();
     return new CellSetModelStream(new ArrayList<RowModel>() {
+      @Override
       public Iterator<RowModel> iterator() {
         return new Iterator<RowModel>() {
           int count = rowsToSend;
 
           @Override
           public boolean hasNext() {
-            if (count > 0) {
-              return itr.hasNext();
-            } else {
-              return false;
-            }
-          }
-
-          @Override
-          public void remove() {
-            throw new UnsupportedOperationException(
-                "Remove method cannot be used in CellSetModelStream");
+            return count > 0 && itr.hasNext();
           }
 
           @Override
@@ -127,7 +117,7 @@ public class TableScanResource  extends ResourceBase {
     servlet.getMetrics().incrementRequests(1);
     try {
       int fetchSize = this.servlet.getConfiguration().getInt(Constants.SCAN_FETCH_SIZE, 10);
-      ProtobufStreamingUtil stream = new ProtobufStreamingUtil(this.results, contentType,
+      StreamingOutput stream = new ProtobufStreamingOutput(this.results, contentType,
           userRequestedLimit, fetchSize);
       servlet.getMetrics().incrementSucessfulScanRequests(1);
       ResponseBuilder response = Response.ok(stream);

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
index 3465490..ffb6743 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
@@ -28,6 +28,7 @@ import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlValue;
 
+import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.commons.lang3.builder.ToStringBuilder;
@@ -38,7 +39,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
-import org.codehaus.jackson.annotate.JsonProperty;
 
 /**
  * Representation of a cell. A cell is a single value associated a column and

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
index e9686f2..967f6ba 100644
--- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
+++ 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
@@ -31,8 +31,9 @@ import javax.xml.namespace.QName;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
-import org.codehaus.jackson.annotate.JsonAnyGetter;
-import org.codehaus.jackson.annotate.JsonAnySetter;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
 
 /**
  * Representation of a column family schema.

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/NamespacesModel.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/NamespacesModel.java
 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/NamespacesModel.java
index 9bb5adb..c0dfa75 100644
--- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/NamespacesModel.java
+++ 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/NamespacesModel.java
@@ -34,7 +34,8 @@ import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import 
org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces;
-import org.codehaus.jackson.annotate.JsonProperty;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
 
 
 /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java
index c8f028c..8f0bf53 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java
@@ -30,12 +30,12 @@ import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 
+import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
-import org.codehaus.jackson.annotate.JsonProperty;
 
 /**
  * Representation of a row. A row is a related set of cells, grouped by common

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
index 739af9d..a678fde 100644
--- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
+++ 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
@@ -44,7 +44,6 @@ import org.apache.hadoop.hbase.filter.ColumnPaginationFilter;
 import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
 import org.apache.hadoop.hbase.filter.ColumnRangeFilter;
 import org.apache.hadoop.hbase.filter.CompareFilter;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.DependentColumnFilter;
 import org.apache.hadoop.hbase.filter.FamilyFilter;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -77,10 +76,10 @@ import org.apache.hadoop.hbase.util.Base64;
 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Bytes;
 
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import com.google.protobuf.ByteString;
 
-import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
-
 /**
  * A representation of Scanner parameters.
  *
@@ -101,6 +100,7 @@ import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
  * </pre>
  */
 @XmlRootElement(name="Scanner")
+@JsonInclude(JsonInclude.Include.NON_NULL)
 @InterfaceAudience.Private
 public class ScannerModel implements ProtobufMessageHandler, Serializable {
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
index ccce92d..2f38a44 100644
--- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
+++ 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
@@ -36,6 +36,8 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import 
org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
 import org.apache.hadoop.hbase.util.Bytes;
 
+import com.fasterxml.jackson.annotation.JsonProperty;
+
 /**
  * Representation of the status of a storage cluster:
  * <p>
@@ -561,6 +563,8 @@ public class StorageClusterStatusModel
    */
   @XmlElement(name = "Node")
   @XmlElementWrapper(name = "LiveNodes")
+  // workaround https://github.com/FasterXML/jackson-dataformat-xml/issues/192
+  @JsonProperty("LiveNodes")
   public List<Node> getLiveNodes() {
     return liveNodes;
   }
@@ -570,6 +574,8 @@ public class StorageClusterStatusModel
    */
   @XmlElement(name = "Node")
   @XmlElementWrapper(name = "DeadNodes")
+  // workaround https://github.com/FasterXML/jackson-dataformat-xml/issues/192
+  @JsonProperty("DeadNodes")
   public List<String> getDeadNodes() {
     return deadNodes;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
index af05b09..5840997 100644
--- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
+++ 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
@@ -19,13 +19,10 @@
 
 package org.apache.hadoop.hbase.rest.model;
 
-import org.codehaus.jackson.annotate.JsonValue;
-
 import java.io.Serializable;
 
 import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlRootElement;
-import javax.xml.bind.annotation.XmlValue;
 
 import org.apache.yetus.audience.InterfaceAudience;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
index 7e81745..553bfe0 100644
--- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
+++ 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
@@ -43,9 +43,10 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import 
org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema;
 import 
org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.codehaus.jackson.annotate.JsonAnyGetter;
-import org.codehaus.jackson.annotate.JsonAnySetter;
-import org.codehaus.jackson.annotate.JsonIgnore;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonIgnore;
 
 /**
  * A representation of HBase table descriptors.

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
index 95d9432..0714c7b 100644
--- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
+++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.rest;
 
+import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -32,9 +33,7 @@ import org.eclipse.jetty.server.Server;
 import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.server.ServerConnector;
 import org.eclipse.jetty.servlet.ServletHolder;
-import org.eclipse.jetty.servlet.ServletContextHandler;
 
-import org.glassfish.jersey.jackson1.Jackson1Feature;
 import org.glassfish.jersey.server.ResourceConfig;
 import org.glassfish.jersey.servlet.ServletContainer;
 
@@ -63,7 +62,7 @@ public class HBaseRESTTestingUtility {
 
     // set up the Jersey servlet container for Jetty
     ResourceConfig app = new ResourceConfig().
-        packages("org.apache.hadoop.hbase.rest").register(Jackson1Feature.class);
+        packages("org.apache.hadoop.hbase.rest").register(JacksonJaxbJsonProvider.class);
     ServletHolder sh = new ServletHolder(new ServletContainer(app));
 
     // set up Jetty and run the embedded server

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
index aa4fb3e..35442ea 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java
@@ -30,6 +30,8 @@ import javax.xml.bind.JAXBException;
 import javax.xml.bind.Marshaller;
 import javax.xml.bind.Unmarshaller;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -43,8 +45,6 @@ import org.apache.hadoop.hbase.rest.model.CellModel;
 import org.apache.hadoop.hbase.rest.model.CellSetModel;
 import org.apache.hadoop.hbase.rest.model.RowModel;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestDeleteRow.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestDeleteRow.java 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestDeleteRow.java
index 516ce9e..a3c5e37 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestDeleteRow.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestDeleteRow.java
@@ -33,7 +33,7 @@ public class TestDeleteRow extends RowResourceBase {
   @Test
   public void testDeleteNonExistentColumn() throws Exception {
     Response response = putValueJson(TABLE, ROW_1, COLUMN_1, VALUE_1);
-    assertEquals(response.getCode(), 200);
+    assertEquals(200, response.getCode());
 
     response = checkAndDeleteJson(TABLE, ROW_1, COLUMN_1, VALUE_2);
     assertEquals(304, response.getCode());

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
index 537a293..ee14ea5 100644
--- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
+++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
@@ -32,8 +32,6 @@ import org.apache.hadoop.hbase.rest.model.RowModel;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RestTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -47,9 +45,10 @@ import javax.xml.bind.JAXBException;
 import javax.xml.bind.Marshaller;
 import javax.xml.bind.Unmarshaller;
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Collection;
-import java.util.List;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 
 import static org.junit.Assert.assertEquals;
 
@@ -202,7 +201,7 @@ public class TestMultiRowResource {
     assertEquals(response.getCode(), 200);
     ObjectMapper mapper =
         new JacksonJaxbJsonProvider().locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE);
-    CellSetModel cellSet = (CellSetModel) mapper.readValue(response.getBody(), CellSetModel.class);
+    CellSetModel cellSet = mapper.readValue(response.getBody(), CellSetModel.class);
     assertEquals(2, cellSet.getRows().size());
     assertEquals(ROW_1, Bytes.toString(cellSet.getRows().get(0).getKey()));
     assertEquals(VALUE_1, 
Bytes.toString(cellSet.getRows().get(0).getCells().get(0).getValue()));

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
index 58e8ea0..9c724cd 100644
--- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
+++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
@@ -48,8 +48,6 @@ import 
org.apache.hadoop.hbase.rest.model.TestNamespacesInstanceModel;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RestTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
-import org.codehaus.jackson.map.ObjectMapper;
 
 import static org.junit.Assert.*;
 
@@ -58,6 +56,9 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
+
 @Category({RestTests.class, MediumTests.class})
 public class TestNamespacesInstanceResource {
   private static String NAMESPACE1 = "TestNamespacesInstanceResource1";
@@ -265,11 +266,11 @@ public class TestNamespacesInstanceResource {
 
     // Try REST post and puts with invalid content.
     response = client.post(namespacePath1, Constants.MIMETYPE_JSON, toXML(model1));
-    assertEquals(400, response.getCode());
+    assertEquals(500, response.getCode());
     String jsonString = jsonMapper.writeValueAsString(model2);
     response = client.put(namespacePath2, Constants.MIMETYPE_XML, Bytes.toBytes(jsonString));
     assertEquals(400, response.getCode());
-    response = client.post(namespacePath3, Constants.MIMETYPE_PROTOBUF, toXML(model1));
+    response = client.post(namespacePath3, Constants.MIMETYPE_PROTOBUF, toXML(model3));
     assertEquals(500, response.getCode());
 
     NamespaceDescriptor nd1 = findNamespace(admin, NAMESPACE1);

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
index f0c3d4a..4866d53 100644
--- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
+++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
@@ -21,13 +21,14 @@ package org.apache.hadoop.hbase.rest;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
-import java.util.ArrayList;
 import java.util.Collection;
-import java.util.List;
 
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
 import org.apache.http.Header;
 import org.apache.http.message.BasicHeader;
 
@@ -47,6 +48,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 
 import static org.junit.Assert.*;
 
+import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -57,6 +59,8 @@ import org.junit.runners.Parameterized;
 @Category({RestTests.class, MediumTests.class})
 @RunWith(Parameterized.class)
 public class TestSchemaResource {
+  private static final Log LOG = LogFactory.getLog(TestSchemaResource.class);
+
   private static String TABLE1 = "TestSchemaResource1";
   private static String TABLE2 = "TestSchemaResource2";
 
@@ -72,11 +76,8 @@ public class TestSchemaResource {
   private static boolean csrfEnabled = true;
 
   @Parameterized.Parameters
-  public static Collection<Object[]> data() {
-    List<Object[]> params = new ArrayList<>(2);
-    params.add(new Object[] {Boolean.TRUE});
-    params.add(new Object[] {Boolean.FALSE});
-    return params;
+  public static Collection<Object[]> parameters() {
+    return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
   }
 
   public TestSchemaResource(Boolean csrf) {
@@ -104,6 +105,21 @@ public class TestSchemaResource {
     TEST_UTIL.shutdownMiniCluster();
   }
 
+  @After
+  public void tearDown() throws Exception {
+    Admin admin = TEST_UTIL.getAdmin();
+
+    for (String table : new String[] {TABLE1, TABLE2}) {
+      TableName t = TableName.valueOf(table);
+      if (admin.tableExists(t)) {
+        admin.disableTable(t);
+        admin.deleteTable(t);
+      }
+    }
+
+    conf.set("hbase.rest.readonly", "false");
+  }
+
   private static byte[] toXML(TableSchemaModel model) throws JAXBException {
     StringWriter writer = new StringWriter();
     context.createMarshaller().marshal(model, writer);
@@ -123,7 +139,7 @@ public class TestSchemaResource {
     Response response;
 
     Admin admin = TEST_UTIL.getAdmin();
-    assertFalse(admin.tableExists(TableName.valueOf(TABLE1)));
+    assertFalse("Table " + TABLE1 + " should not exist", admin.tableExists(TableName.valueOf(TABLE1)));
 
     // create the table
     model = testTableSchemaModel.buildTestModel(TABLE1);
@@ -131,27 +147,28 @@ public class TestSchemaResource {
     if (csrfEnabled) {
       // test put operation is forbidden without custom header
       response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model));
-      assertEquals(response.getCode(), 400);
+      assertEquals(400, response.getCode());
     }
 
     response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model), 
extraHdr);
-    assertEquals(response.getCode(), 201);
+    assertEquals("put failed with csrf " + (csrfEnabled ? "enabled" : "disabled"),
+       201, response.getCode());
 
     // recall the same put operation but in read-only mode
     conf.set("hbase.rest.readonly", "true");
     response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model), 
extraHdr);
-    assertEquals(response.getCode(), 403);
+    assertEquals(403, response.getCode());
 
     // retrieve the schema and validate it
     response = client.get(schemaPath, Constants.MIMETYPE_XML);
-    assertEquals(response.getCode(), 200);
+    assertEquals(200, response.getCode());
     assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type"));
     model = fromXML(response.getBody());
     testTableSchemaModel.checkModel(model, TABLE1);
 
     // with json retrieve the schema and validate it
     response = client.get(schemaPath, Constants.MIMETYPE_JSON);
-    assertEquals(response.getCode(), 200);
+    assertEquals(200, response.getCode());
     assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
     model = testTableSchemaModel.fromJSON(Bytes.toString(response.getBody()));
     testTableSchemaModel.checkModel(model, TABLE1);
@@ -164,14 +181,14 @@ public class TestSchemaResource {
 
     // test delete schema operation is forbidden in read-only mode
     response = client.delete(schemaPath, extraHdr);
-    assertEquals(response.getCode(), 403);
+    assertEquals(403, response.getCode());
 
     // return read-only setting back to default
     conf.set("hbase.rest.readonly", "false");
 
     // delete the table and make sure HBase concurs
     response = client.delete(schemaPath, extraHdr);
-    assertEquals(response.getCode(), 200);
+    assertEquals(200, response.getCode());
     assertFalse(admin.tableExists(TableName.valueOf(TABLE1)));
   }
 
@@ -191,11 +208,12 @@ public class TestSchemaResource {
     if (csrfEnabled) {
       // test put operation is forbidden without custom header
       response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF, 
model.createProtobufOutput());
-      assertEquals(response.getCode(), 400);
+      assertEquals(400, response.getCode());
     }
     response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF,
       model.createProtobufOutput(), extraHdr);
-    assertEquals(response.getCode(), 201);
+    assertEquals("put failed with csrf " + (csrfEnabled ? "enabled" : "disabled"),
+        201, response.getCode());
 
     // recall the same put operation but in read-only mode
     conf.set("hbase.rest.readonly", "true");

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
index c674b3c..8380a0a 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
@@ -43,6 +43,8 @@ import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.parsers.SAXParserFactory;
 import javax.xml.stream.XMLStreamException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -61,11 +63,6 @@ import org.apache.hadoop.hbase.rest.model.RowModel;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RestTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.JsonToken;
-import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -73,8 +70,15 @@ import org.junit.experimental.categories.Category;
 import org.xml.sax.InputSource;
 import org.xml.sax.XMLReader;
 
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonToken;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
+
 @Category({RestTests.class, MediumTests.class})
 public class TestTableScan {
+  private static final Log LOG = LogFactory.getLog(TestTableScan.class);
 
   private static final TableName TABLE = TableName.valueOf("TestScanResource");
   private static final String CFA = "a";
@@ -201,7 +205,7 @@ public class TestTableScan {
     builder.append("?");
     builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
     builder.append("&");
-    builder.append(Constants.SCAN_LIMIT + "=20");
+    builder.append(Constants.SCAN_LIMIT + "=2");
     Response response = client.get("/" + TABLE + builder.toString(),
       Constants.MIMETYPE_JSON);
     assertEquals(200, response.getCode());
@@ -210,7 +214,7 @@ public class TestTableScan {
         .locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE);
     CellSetModel model = mapper.readValue(response.getStream(), 
CellSetModel.class);
     int count = TestScannerResource.countCellSet(model);
-    assertEquals(20, count);
+    assertEquals(2, count);
     checkRowsNotNull(model);
 
     //Test scanning with no limit.
@@ -305,40 +309,8 @@ public class TestTableScan {
 
   @Test
   public void testStreamingJSON() throws Exception {
-    // Test scanning particular columns with limit.
-    StringBuilder builder = new StringBuilder();
-    builder.append("/*");
-    builder.append("?");
-    builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
-    builder.append("&");
-    builder.append(Constants.SCAN_LIMIT + "=20");
-    Response response = client.get("/" + TABLE + builder.toString(),
-      Constants.MIMETYPE_JSON);
-    assertEquals(200, response.getCode());
-    assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
-    ObjectMapper mapper = new JacksonJaxbJsonProvider()
-        .locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE);
-    CellSetModel model = mapper.readValue(response.getStream(), 
CellSetModel.class);
-    int count = TestScannerResource.countCellSet(model);
-    assertEquals(20, count);
-    checkRowsNotNull(model);
-
-    //Test scanning with no limit.
-    builder = new StringBuilder();
-    builder.append("/*");
-    builder.append("?");
-    builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_2);
-    response = client.get("/" + TABLE + builder.toString(),
-      Constants.MIMETYPE_JSON);
-    assertEquals(200, response.getCode());
-    assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
-    model = mapper.readValue(response.getStream(), CellSetModel.class);
-    count = TestScannerResource.countCellSet(model);
-    assertEquals(expectedRows2, count);
-    checkRowsNotNull(model);
-
     //Test with start row and end row.
-    builder = new StringBuilder();
+    StringBuilder builder = new StringBuilder();
     builder.append("/*");
     builder.append("?");
     builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
@@ -346,11 +318,13 @@ public class TestTableScan {
     builder.append(Constants.SCAN_START_ROW + "=aaa");
     builder.append("&");
     builder.append(Constants.SCAN_END_ROW + "=aay");
-    response = client.get("/" + TABLE + builder.toString(),
+    Response response = client.get("/" + TABLE + builder.toString(),
       Constants.MIMETYPE_JSON);
     assertEquals(200, response.getCode());
 
-    count = 0;
+    int count = 0;
+    ObjectMapper mapper = new JacksonJaxbJsonProvider()
+        .locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE);
     JsonFactory jfactory = new JsonFactory(mapper);
     JsonParser jParser = jfactory.createJsonParser(response.getStream());
     boolean found = false;
@@ -390,7 +364,7 @@ public class TestTableScan {
     int rowCount = readProtobufStream(response.getStream());
     assertEquals(15, rowCount);
 
-  //Test with start row and end row.
+    //Test with start row and end row.
     builder = new StringBuilder();
     builder.append("/*");
     builder.append("?");

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
index 99fce2c..e76422b 100644
--- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
+++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
@@ -36,10 +36,11 @@ import org.apache.hadoop.hbase.rest.model.VersionModel;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RestTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.glassfish.jersey.servlet.ServletContainer;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
+
 import static org.junit.Assert.*;
 
 import org.junit.AfterClass;
@@ -99,7 +100,7 @@ public class TestVersionResource {
   @Test
   public void testGetStargateVersionText() throws IOException {
     Response response = client.get("/version", Constants.MIMETYPE_TEXT);
-    assertTrue(response.getCode() == 200);
+    assertEquals(200, response.getCode());
     assertEquals(Constants.MIMETYPE_TEXT, response.getHeader("content-type"));
     String body = Bytes.toString(response.getBody());
     assertTrue(body.length() > 0);
@@ -117,7 +118,7 @@ public class TestVersionResource {
   @Test
   public void testGetStargateVersionXML() throws IOException, JAXBException {
     Response response = client.get("/version", Constants.MIMETYPE_XML);
-    assertTrue(response.getCode() == 200);
+    assertEquals(200, response.getCode());
     assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type"));
     VersionModel model = (VersionModel)
       context.createUnmarshaller().unmarshal(
@@ -129,7 +130,7 @@ public class TestVersionResource {
   @Test
   public void testGetStargateVersionJSON() throws IOException {
     Response response = client.get("/version", Constants.MIMETYPE_JSON);
-    assertTrue(response.getCode() == 200);
+    assertEquals(200, response.getCode());
     assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
     ObjectMapper mapper = new JacksonJaxbJsonProvider()
             .locateMapper(VersionModel.class, MediaType.APPLICATION_JSON_TYPE);
@@ -142,13 +143,13 @@ public class TestVersionResource {
   @Test
   public void testGetStargateVersionPB() throws IOException {
     Response response = client.get("/version", Constants.MIMETYPE_PROTOBUF);
-    assertTrue(response.getCode() == 200);
+    assertEquals(200, response.getCode());
     assertEquals(Constants.MIMETYPE_PROTOBUF, response.getHeader("content-type"));
     VersionModel model = new VersionModel();
     model.getObjectFromMessage(response.getBody());
     validate(model);
     response = client.get("/version", Constants.MIMETYPE_PROTOBUF_IETF);
-    assertTrue(response.getCode() == 200);
+    assertEquals(200, response.getCode());
     assertEquals(Constants.MIMETYPE_PROTOBUF_IETF, response.getHeader("content-type"));
     model = new VersionModel();
     model.getObjectFromMessage(response.getBody());
@@ -158,7 +159,7 @@ public class TestVersionResource {
   @Test
   public void testGetStorageClusterVersionText() throws IOException {
     Response response = client.get("/version/cluster", Constants.MIMETYPE_TEXT);
-    assertTrue(response.getCode() == 200);
+    assertEquals(200, response.getCode());
     assertEquals(Constants.MIMETYPE_TEXT, response.getHeader("content-type"));
   }
 
@@ -166,7 +167,7 @@ public class TestVersionResource {
   public void testGetStorageClusterVersionXML() throws IOException,
       JAXBException {
     Response response = client.get("/version/cluster",Constants.MIMETYPE_XML);
-    assertTrue(response.getCode() == 200);
+    assertEquals(200, response.getCode());
     assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type"));
     StorageClusterVersionModel clusterVersionModel = 
       (StorageClusterVersionModel)
@@ -180,7 +181,7 @@ public class TestVersionResource {
   @Test
   public void testGetStorageClusterVersionJSON() throws IOException {
     Response response = client.get("/version/cluster", Constants.MIMETYPE_JSON);
-    assertTrue(response.getCode() == 200);
+    assertEquals(200, response.getCode());
     assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
     ObjectMapper mapper = new JacksonJaxbJsonProvider()
             .locateMapper(StorageClusterVersionModel.class, MediaType.APPLICATION_JSON_TYPE);

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java
 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java
index af5545e..3c41d6d 100644
--- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java
+++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java
@@ -62,14 +62,14 @@ public class TestColumnSchemaModel extends 
TestModelBase<ColumnSchemaModel> {
   }
 
   protected void checkModel(ColumnSchemaModel model) {
-    assertEquals(model.getName(), COLUMN_NAME);
-    assertEquals(model.__getBlockcache(), BLOCKCACHE);
-    assertEquals(model.__getBlocksize(), BLOCKSIZE);
-    assertEquals(model.__getBloomfilter(), BLOOMFILTER);
-    assertTrue(model.__getCompression().equalsIgnoreCase(COMPRESSION));
-    assertEquals(model.__getInMemory(), IN_MEMORY);
-    assertEquals(model.__getTTL(), TTL);
-    assertEquals(model.__getVersions(), VERSIONS);
+    assertEquals("name", COLUMN_NAME, model.getName());
+    assertEquals("block cache", BLOCKCACHE, model.__getBlockcache());
+    assertEquals("block size", BLOCKSIZE, model.__getBlocksize());
+    assertEquals("bloomfilter", BLOOMFILTER, model.__getBloomfilter());
+    assertTrue("compression", model.__getCompression().equalsIgnoreCase(COMPRESSION));
+    assertEquals("in memory", IN_MEMORY, model.__getInMemory());
+    assertEquals("ttl", TTL, model.__getTTL());
+    assertEquals("versions", VERSIONS, model.__getVersions());
   }
 
   public void testFromPB() throws Exception {

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java
 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java
index 9cba485..b5826b8 100644
--- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java
+++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java
@@ -24,9 +24,6 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.provider.JAXBContextResolver;
 import org.apache.hadoop.hbase.util.Base64;
-import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.node.ObjectNode;
 import org.junit.experimental.categories.Category;
 
 import javax.ws.rs.core.MediaType;
@@ -35,6 +32,9 @@ import javax.xml.bind.JAXBException;
 import java.io.IOException;
 import java.io.StringReader;
 import java.io.StringWriter;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
 
 @Category({RestTests.class, SmallTests.class})
 public abstract class TestModelBase<T> extends TestCase {

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
----------------------------------------------------------------------
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
index 4b2eb05..c41128d 100644
--- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
+++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
@@ -23,6 +23,8 @@ import java.util.Iterator;
 
 import javax.xml.bind.JAXBContext;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.testclassification.RestTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 
@@ -30,6 +32,7 @@ import org.junit.experimental.categories.Category;
 
 @Category({RestTests.class, SmallTests.class})
 public class TestTableSchemaModel extends TestModelBase<TableSchemaModel> {
+  private static final Log LOG = LogFactory.getLog(TestTableSchemaModel.class);
 
   public static final String TABLE_NAME = "testTable";
   private static final boolean IS_META = false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-server/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index ad080f1..3a025d1 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -483,12 +483,12 @@
       <artifactId>jetty-security</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.glassfish.jersey.containers</groupId>
-      <artifactId>jersey-container-servlet-core</artifactId>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.glassfish.jersey.media</groupId>
-      <artifactId>jersey-media-json-jackson1</artifactId>
+      <groupId>org.glassfish.jersey.containers</groupId>
+      <artifactId>jersey-container-servlet-core</artifactId>
     </dependency>
     <dependency>
       <!--For JspC used in ant task-->
@@ -503,10 +503,6 @@
       <artifactId>javax.el</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-core-asl</artifactId>
-    </dependency>
-    <dependency>
       <groupId>org.codehaus.jettison</groupId>
       <artifactId>jettison</artifactId>
         <exclusions>
@@ -516,10 +512,6 @@
           </exclusion>
         </exclusions>
     </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-mapper-asl</artifactId>
-    </dependency>
     <!-- General dependencies -->
     <dependency>
        <groupId>com.github.stephenc.findbugs</groupId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java
index dd3bf25..bb80abe 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AgeSnapshot.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import org.apache.hadoop.hbase.metrics.impl.FastLongHistogram;
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 
 /**
  * Snapshot of block cache age in cache.

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java
index fc807db..3c04fa8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.java
@@ -24,16 +24,15 @@ import java.util.NavigableSet;
 import java.util.concurrent.ConcurrentSkipListMap;
 import java.util.concurrent.ConcurrentSkipListSet;
 
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.metrics.impl.FastLongHistogram;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.codehaus.jackson.JsonGenerationException;
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.SerializationConfig;
-
 
 /**
  * Utilty for aggregating counts in CachedBlocks and toString/toJSON 
CachedBlocks and BlockCaches.
@@ -50,9 +49,9 @@ public class BlockCacheUtil {
    */
   private static final ObjectMapper MAPPER = new ObjectMapper();
   static {
-    MAPPER.configure(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false);
-    MAPPER.configure(SerializationConfig.Feature.FLUSH_AFTER_WRITE_VALUE, 
true);
-    MAPPER.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
+    MAPPER.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
+    MAPPER.configure(SerializationFeature.FLUSH_AFTER_WRITE_VALUE, true);
+    MAPPER.configure(SerializationFeature.INDENT_OUTPUT, true);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
index bd00393..0fde0a7 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
@@ -45,14 +45,15 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.HasThread;
 import org.apache.hadoop.util.StringUtils;
-import org.codehaus.jackson.annotate.JsonIgnore;
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 
 import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Objects;
 import 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
 
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+
 /**
  * A block cache implementation that is memory-aware using {@link HeapSize},
  * memory-bound using an LRU eviction algorithm, and concurrent: backed by a

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
index d9c3c9a..40b64be 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
@@ -37,7 +37,7 @@ import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry;
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
index 36d383a..24cf166 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
@@ -36,6 +36,7 @@ import java.util.Map;
 import java.util.Optional;
 import java.util.concurrent.atomic.LongAdder;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -68,7 +69,6 @@ import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
-import org.codehaus.jackson.map.ObjectMapper;
 
 import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
index 688a71c..b3869f4 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
@@ -18,8 +18,8 @@
  */
 package org.apache.hadoop.hbase.monitoring;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.yetus.audience.InterfaceAudience;
-import org.codehaus.jackson.map.ObjectMapper;
 
 import java.io.IOException;
 import java.util.HashMap;

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
index 0739e91..f4a146e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
@@ -41,11 +41,11 @@ import javax.management.openmbean.CompositeData;
 import javax.management.openmbean.CompositeType;
 import javax.management.openmbean.TabularData;
 
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonGenerationException;
-import org.codehaus.jackson.JsonGenerator;
 
 /**
  * Utility for doing JSON and MBeans.

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
index f64934e..879f32e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
@@ -38,11 +38,11 @@ import javax.management.ObjectName;
 import javax.management.ReflectionException;
 import javax.management.openmbean.CompositeData;
 
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.JsonProcessingException;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jettison.json.JSONException;
 
 public final class JSONMetricUtil {
@@ -112,8 +112,8 @@ public final class JSONMetricUtil {
     return sw.toString();
   }
 
-  public static JsonNode mappStringToJsonNode(String jsonString) throws
-  JsonProcessingException, IOException {
+  public static JsonNode mappStringToJsonNode(String jsonString)
+      throws JsonProcessingException, IOException {
     ObjectMapper mapper = new ObjectMapper();
     JsonNode node = mapper.readTree(jsonString);
     return node;

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
index 9ed5a64..d4c320b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
@@ -27,6 +27,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.HelpFormatter;
@@ -47,7 +48,6 @@ import org.apache.yetus.audience.InterfaceStability;
 import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
-import org.codehaus.jackson.map.ObjectMapper;
 
 /**
  * WALPrettyPrinter prints the contents of a given WAL with a variety of

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-server/src/main/resources/hbase-webapps/master/processMaster.jsp
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/main/resources/hbase-webapps/master/processMaster.jsp 
b/hbase-server/src/main/resources/hbase-webapps/master/processMaster.jsp
index 6e87e72..d99e198 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/processMaster.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/processMaster.jsp
@@ -29,7 +29,7 @@
   import="java.lang.management.GarbageCollectorMXBean"
   import="org.apache.hadoop.hbase.util.JSONMetricUtil"
   import="org.apache.hadoop.hbase.procedure2.util.StringUtils"
-  import="org.codehaus.jackson.JsonNode"
+  import="com.fasterxml.jackson.databind.JsonNode"
 %>
 <%
 RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();

http://git-wip-us.apache.org/repos/asf/hbase/blob/5facaded/hbase-server/src/main/resources/hbase-webapps/master/processRS.jsp
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/processRS.jsp 
b/hbase-server/src/main/resources/hbase-webapps/master/processRS.jsp
index cc18d5b..f0df0c0 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/processRS.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/processRS.jsp
@@ -29,7 +29,7 @@
   import="java.lang.management.GarbageCollectorMXBean"
   import="org.apache.hadoop.hbase.util.JSONMetricUtil"
   import="org.apache.hadoop.hbase.procedure2.util.StringUtils"
-  import="org.codehaus.jackson.JsonNode"
+  import="com.fasterxml.jackson.databind.JsonNode"
 %>
 <%
 RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();

Reply via email to