Repository: hbase
Updated Branches:
  refs/heads/branch-1 c3bf558b6 -> 528eb1082


HBASE-19545 Replace getBytes(StandardCharsets.UTF_8) with Bytes.toBytes

Signed-off-by: Chia-Ping Tsai <chia7...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/528eb108
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/528eb108
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/528eb108

Branch: refs/heads/branch-1
Commit: 528eb10826503d3ba6d7f8305039e25775f123ed
Parents: c3bf558
Author: Peter Somogyi <psomo...@cloudera.com>
Authored: Thu Dec 21 15:41:14 2017 +0100
Committer: Chia-Ping Tsai <chia7...@gmail.com>
Committed: Wed Dec 27 20:15:01 2017 +0800

----------------------------------------------------------------------
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  5 +-
 .../apache/hadoop/hbase/security/SaslUtil.java  |  3 +-
 .../hadoop/hbase/TestHColumnDescriptor.java     |  7 +-
 .../hadoop/hbase/client/TestAsyncProcess.java   | 26 +++---
 .../hadoop/hbase/client/TestClientScanner.java  | 30 +++----
 .../hadoop/hbase/client/TestDelayingRunner.java |  9 +-
 .../hadoop/hbase/client/TestOperation.java      | 89 ++++++++------------
 .../hbase/security/TestHBaseSaslRpcClient.java  | 23 ++---
 .../org/apache/hadoop/hbase/util/Base64.java    | 12 +--
 .../org/apache/hadoop/hbase/TestCellUtil.java   | 33 ++++----
 .../io/crypto/TestKeyStoreKeyProvider.java      |  4 +-
 .../apache/hadoop/hbase/types/TestStruct.java   | 16 ++--
 .../hbase/util/TestLoadTestKVGenerator.java     | 12 ++-
 .../hadoop/hbase/util/TestOrderedBytes.java     | 63 +++++++-------
 .../hadoop/hbase/rest/TestGzipFilter.java       |  5 +-
 .../hbase/rest/client/TestXmlParsing.java       |  6 +-
 .../hbase/mapreduce/HFileOutputFormat2.java     | 12 ++-
 .../hadoop/hbase/regionserver/HRegion.java      |  2 +-
 .../hbase/regionserver/HRegionServer.java       | 32 +++----
 .../hbase/zookeeper/MiniZooKeeperCluster.java   | 15 ++--
 20 files changed, 174 insertions(+), 230 deletions(-)
----------------------------------------------------------------------
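
For context: org.apache.hadoop.hbase.util.Bytes.toBytes(String) encodes its
argument as UTF-8, the same bytes that String.getBytes(StandardCharsets.UTF_8)
produces, so the substitution applied throughout this patch is
behavior-preserving. A minimal sketch of the equivalence; the demo class and
its main method are illustrative only, not part of the patch:

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;
    import org.apache.hadoop.hbase.util.Bytes;

    public class BytesToBytesDemo {
      public static void main(String[] args) {
        // Old style, removed throughout this patch
        byte[] viaCharset = "DUMMY_BYTES_1".getBytes(StandardCharsets.UTF_8);
        // New style; Bytes.toBytes(String) also encodes UTF-8
        byte[] viaBytes = Bytes.toBytes("DUMMY_BYTES_1");
        System.out.println(Arrays.equals(viaCharset, viaBytes)); // prints true
      }
    }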


http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 051a768..507c6fb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -24,7 +24,6 @@ import com.google.protobuf.ServiceException;
 
 import java.io.IOException;
 import java.io.InterruptedIOException;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -2612,9 +2611,9 @@ public class HBaseAdmin implements Admin {
       final byte[] nameOfRegionB, final boolean forcible)
       throws IOException {
     final byte[] encodedNameOfRegionA = isEncodedRegionName(nameOfRegionA) ? nameOfRegionA :
-      HRegionInfo.encodeRegionName(nameOfRegionA).getBytes(StandardCharsets.UTF_8);
+      Bytes.toBytes(HRegionInfo.encodeRegionName(nameOfRegionA));
     final byte[] encodedNameOfRegionB = isEncodedRegionName(nameOfRegionB) ? nameOfRegionB :
-      HRegionInfo.encodeRegionName(nameOfRegionB).getBytes(StandardCharsets.UTF_8);
+      Bytes.toBytes(HRegionInfo.encodeRegionName(nameOfRegionB));
 
     Pair<HRegionInfo, ServerName> pair = getRegion(nameOfRegionA);
     if (pair != null && pair.getFirst().getReplicaId() != HRegionInfo.DEFAULT_REPLICA_ID)

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
index b26dcac..1516a6b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
@@ -30,6 +30,7 @@ import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.util.Bytes;
 
 @InterfaceAudience.Private
 public class SaslUtil {
@@ -72,7 +73,7 @@ public class SaslUtil {
   }
 
   static byte[] decodeIdentifier(String identifier) {
-    return Base64.decodeBase64(identifier.getBytes(StandardCharsets.UTF_8));
+    return Base64.decodeBase64(Bytes.toBytes(identifier));
   }
 
   static char[] encodePassword(byte[] password) {
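
For context, the SaslUtil change keeps the commons-codec Base64 call and only
swaps the String-to-bytes conversion. A minimal round-trip sketch of the
patched decodeIdentifier shape; the demo class name and the sample identifier
are hypothetical:

    import java.util.Arrays;

    import org.apache.commons.codec.binary.Base64;
    import org.apache.hadoop.hbase.util.Bytes;

    public class SaslIdentifierRoundTrip {
      public static void main(String[] args) {
        byte[] original = Bytes.toBytes("token-identifier");
        String encoded = Base64.encodeBase64String(original);
        // Same call shape as the patched SaslUtil.decodeIdentifier(String)
        byte[] decoded = Base64.decodeBase64(Bytes.toBytes(encoded));
        System.out.println(Arrays.equals(original, decoded)); // prints true
      }
    }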

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
index a4f9260..27d2cc8 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
@@ -21,8 +21,6 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-import java.nio.charset.StandardCharsets;
-
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
@@ -30,8 +28,9 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.BuilderStyleTest;
-import org.junit.experimental.categories.Category;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
 /** Tests the HColumnDescriptor with appropriate arguments */
 @Category(SmallTests.class)
@@ -80,7 +79,7 @@ public class TestHColumnDescriptor {
   public void testHColumnDescriptorShouldThrowIAEWhenFamiliyNameEmpty()
       throws Exception {
     try {
-      new HColumnDescriptor("".getBytes(StandardCharsets.UTF_8));
+      new HColumnDescriptor(Bytes.toBytes(""));
       fail("Did not throw");
     } catch (IllegalArgumentException e) {
       assertEquals("Family name can not be empty", e.getLocalizedMessage());

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
index e0d09a6..e8c7b73 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
@@ -19,12 +19,13 @@
 
 package org.apache.hadoop.hbase.client;
 
-
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.IOException;
 import java.io.InterruptedIOException;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -65,12 +66,12 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFuture;
 import org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFutureImpl;
 import org.apache.hadoop.hbase.client.AsyncProcess.ListRowAccess;
-import org.apache.hadoop.hbase.client.AsyncProcess.TaskCountChecker;
-import org.apache.hadoop.hbase.client.AsyncProcess.RowChecker.ReturnCode;
-import org.apache.hadoop.hbase.client.AsyncProcess.RowCheckerHost;
 import org.apache.hadoop.hbase.client.AsyncProcess.RequestSizeChecker;
 import org.apache.hadoop.hbase.client.AsyncProcess.RowChecker;
+import org.apache.hadoop.hbase.client.AsyncProcess.RowChecker.ReturnCode;
+import org.apache.hadoop.hbase.client.AsyncProcess.RowCheckerHost;
 import org.apache.hadoop.hbase.client.AsyncProcess.SubmittedSizeChecker;
+import org.apache.hadoop.hbase.client.AsyncProcess.TaskCountChecker;
 import org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
 import org.apache.hadoop.hbase.client.backoff.ServerStatistics;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
@@ -80,9 +81,6 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
 import org.junit.Assert;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.fail;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
@@ -95,10 +93,10 @@ public class TestAsyncProcess {
   private final static Log LOG = LogFactory.getLog(TestAsyncProcess.class);
   private static final TableName DUMMY_TABLE =
       TableName.valueOf("DUMMY_TABLE");
-  private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes(StandardCharsets.UTF_8);
-  private static final byte[] DUMMY_BYTES_2 = "DUMMY_BYTES_2".getBytes(StandardCharsets.UTF_8);
-  private static final byte[] DUMMY_BYTES_3 = "DUMMY_BYTES_3".getBytes(StandardCharsets.UTF_8);
-  private static final byte[] FAILS = "FAILS".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] DUMMY_BYTES_1 = Bytes.toBytes("DUMMY_BYTES_1");
+  private static final byte[] DUMMY_BYTES_2 = Bytes.toBytes("DUMMY_BYTES_2");
+  private static final byte[] DUMMY_BYTES_3 = Bytes.toBytes("DUMMY_BYTES_3");
+  private static final byte[] FAILS = Bytes.toBytes("FAILS");
   private static final Configuration conf = new Configuration();
 
   private static ServerName sn = ServerName.valueOf("s1:1,1");
@@ -954,7 +952,7 @@ public class TestAsyncProcess {
     final AsyncProcess ap = new MyAsyncProcess(createHConnection(), conf, false);
 
     for (int i = 0; i < 1000; i++) {
-      ap.incTaskCounters(Collections.singletonList("dummy".getBytes(StandardCharsets.UTF_8)), sn);
+      ap.incTaskCounters(Collections.singletonList(Bytes.toBytes("dummy")), sn);
     }
 
     final Thread myThread = Thread.currentThread();
@@ -985,7 +983,7 @@ public class TestAsyncProcess {
       public void run() {
         Threads.sleep(sleepTime);
         while (ap.tasksInProgress.get() > 0) {
-          ap.decTaskCounters(Collections.singletonList("dummy".getBytes(StandardCharsets.UTF_8)), sn);
+          ap.decTaskCounters(Collections.singletonList(Bytes.toBytes("dummy")), sn);
         }
       }
     };

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
index 3171422..77f53ed 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
@@ -30,7 +30,6 @@ import static org.mockito.Matchers.anyInt;
 import static org.mockito.Mockito.when;
 
 import java.io.IOException;
-import java.nio.charset.StandardCharsets;
 import java.util.Iterator;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -45,6 +44,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ScannerCallable.MoreResults;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -128,8 +128,7 @@ public class TestClientScanner {
   @SuppressWarnings("unchecked")
   public void testNoResultsHint() throws IOException {
     final Result[] results = new Result[1];
-    KeyValue kv1 = new KeyValue("row".getBytes(StandardCharsets.UTF_8),
-      "cf".getBytes(StandardCharsets.UTF_8), 
"cq".getBytes(StandardCharsets.UTF_8), 1,
+    KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), 
Bytes.toBytes("cq"), 1,
         Type.Maximum);
     results[0] = Result.create(new Cell[] {kv1});
 
@@ -190,8 +189,7 @@ public class TestClientScanner {
   @SuppressWarnings("unchecked")
   public void testSizeLimit() throws IOException {
     final Result[] results = new Result[1];
-    KeyValue kv1 = new KeyValue("row".getBytes(StandardCharsets.UTF_8),
-      "cf".getBytes(StandardCharsets.UTF_8), 
"cq".getBytes(StandardCharsets.UTF_8), 1,
+    KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), 
Bytes.toBytes("cq"), 1,
         Type.Maximum);
     results[0] = Result.create(new Cell[] {kv1});
 
@@ -249,14 +247,11 @@ public class TestClientScanner {
   @Test
   @SuppressWarnings("unchecked")
   public void testCacheLimit() throws IOException {
-    KeyValue kv1 = new KeyValue("row1".getBytes(StandardCharsets.UTF_8),
-      "cf".getBytes(StandardCharsets.UTF_8), 
"cq".getBytes(StandardCharsets.UTF_8), 1,
-        Type.Maximum),
-      kv2 = new KeyValue("row2".getBytes(StandardCharsets.UTF_8),
-        "cf".getBytes(StandardCharsets.UTF_8), 
"cq".getBytes(StandardCharsets.UTF_8), 1,
-        Type.Maximum),
-      kv3 = new KeyValue("row3".getBytes(StandardCharsets.UTF_8),
-        "cf".getBytes(StandardCharsets.UTF_8), 
"cq".getBytes(StandardCharsets.UTF_8), 1,
+    KeyValue kv1 = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf"), 
Bytes.toBytes("cq"), 1,
+        Type.Maximum);
+    KeyValue kv2 = new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("cf"), 
Bytes.toBytes("cq"), 1,
+        Type.Maximum);
+    KeyValue kv3 = new KeyValue(Bytes.toBytes("row3"), Bytes.toBytes("cf"), 
Bytes.toBytes("cq"), 1,
         Type.Maximum);
     final Result[] results = new Result[] {Result.create(new Cell[] {kv1}),
         Result.create(new Cell[] {kv2}), Result.create(new Cell[] {kv3})};
@@ -330,8 +325,7 @@ public class TestClientScanner {
   @SuppressWarnings("unchecked")
   public void testNoMoreResults() throws IOException {
     final Result[] results = new Result[1];
-    KeyValue kv1 = new KeyValue("row".getBytes(StandardCharsets.UTF_8),
-      "cf".getBytes(StandardCharsets.UTF_8), 
"cq".getBytes(StandardCharsets.UTF_8), 1,
+    KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), 
Bytes.toBytes("cq"), 1,
         Type.Maximum);
     results[0] = Result.create(new Cell[] {kv1});
 
@@ -390,14 +384,12 @@ public class TestClientScanner {
   @SuppressWarnings("unchecked")
   public void testMoreResults() throws IOException {
     final Result[] results1 = new Result[1];
-    KeyValue kv1 = new KeyValue("row".getBytes(StandardCharsets.UTF_8),
-        "cf".getBytes(StandardCharsets.UTF_8), 
"cq".getBytes(StandardCharsets.UTF_8), 1,
+    KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), 
Bytes.toBytes("cq"), 1,
         Type.Maximum);
     results1[0] = Result.create(new Cell[] {kv1});
 
     final Result[] results2 = new Result[1];
-    KeyValue kv2 = new KeyValue("row2".getBytes(StandardCharsets.UTF_8),
-      "cf".getBytes(StandardCharsets.UTF_8), 
"cq".getBytes(StandardCharsets.UTF_8), 1,
+    KeyValue kv2 = new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("cf"), 
Bytes.toBytes("cq"), 1,
         Type.Maximum);
     results2[0] = Result.create(new Cell[] {kv2});
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDelayingRunner.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDelayingRunner.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDelayingRunner.java
index 4e78555..35dbb8d 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDelayingRunner.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDelayingRunner.java
@@ -17,14 +17,15 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
-import java.nio.charset.StandardCharsets;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -34,8 +35,8 @@ public class TestDelayingRunner {
 
   private static final TableName DUMMY_TABLE =
       TableName.valueOf("DUMMY_TABLE");
-  private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes(StandardCharsets.UTF_8);
-  private static final byte[] DUMMY_BYTES_2 = "DUMMY_BYTES_2".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] DUMMY_BYTES_1 = Bytes.toBytes("DUMMY_BYTES_1");
+  private static final byte[] DUMMY_BYTES_2 = Bytes.toBytes("DUMMY_BYTES_2");
   private static HRegionInfo hri1 =
       new HRegionInfo(DUMMY_TABLE, DUMMY_BYTES_1, DUMMY_BYTES_2, false, 1);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
index 3f67247..89b2f5d 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
@@ -20,22 +20,18 @@ package org.apache.hadoop.hbase.client;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Assert;
-import org.junit.Test;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
 import org.apache.hadoop.hbase.filter.ColumnPaginationFilter;
@@ -55,15 +51,18 @@ import org.apache.hadoop.hbase.filter.PageFilter;
 import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
 import org.apache.hadoop.hbase.filter.RowFilter;
-import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.filter.SkipFilter;
 import org.apache.hadoop.hbase.filter.TimestampsFilter;
 import org.apache.hadoop.hbase.filter.ValueFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.BuilderStyleTest;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.junit.Assert;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 /**
@@ -81,66 +80,53 @@ public class TestOperation {
 
   private static List<Long> TS_LIST = Arrays.asList(2L, 3L, 5L);
   private static TimestampsFilter TS_FILTER = new TimestampsFilter(TS_LIST);
-  private static String STR_TS_FILTER =
-      TS_FILTER.getClass().getSimpleName() + " (3/3): [2, 3, 5]";
+  private static String STR_TS_FILTER = TS_FILTER.getClass().getSimpleName() + " (3/3): [2, 3, 5]";
 
-  private static List<Long> L_TS_LIST =
-      Arrays.asList(0L, 1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L);
-  private static TimestampsFilter L_TS_FILTER =
-      new TimestampsFilter(L_TS_LIST);
+  private static List<Long> L_TS_LIST = Arrays.asList(0L, 1L, 2L, 3L, 4L, 5L, 
6L, 7L, 8L, 9L, 10L);
+  private static TimestampsFilter L_TS_FILTER = new 
TimestampsFilter(L_TS_LIST);
   private static String STR_L_TS_FILTER =
       L_TS_FILTER.getClass().getSimpleName() + " (5/11): [0, 1, 2, 3, 4]";
 
   private static String COL_NAME_1 = "col1";
   private static ColumnPrefixFilter COL_PRE_FILTER =
-      new ColumnPrefixFilter(COL_NAME_1.getBytes(StandardCharsets.UTF_8));
+      new ColumnPrefixFilter(Bytes.toBytes(COL_NAME_1));
   private static String STR_COL_PRE_FILTER =
       COL_PRE_FILTER.getClass().getSimpleName() + " " + COL_NAME_1;
 
   private static String COL_NAME_2 = "col2";
-  private static ColumnRangeFilter CR_FILTER = new ColumnRangeFilter(
-      COL_NAME_1.getBytes(StandardCharsets.UTF_8), true,
-      COL_NAME_2.getBytes(StandardCharsets.UTF_8), false);
+  private static ColumnRangeFilter CR_FILTER =
+      new ColumnRangeFilter(Bytes.toBytes(COL_NAME_1), true, Bytes.toBytes(COL_NAME_2), false);
   private static String STR_CR_FILTER = CR_FILTER.getClass().getSimpleName()
       + " [" + COL_NAME_1 + ", " + COL_NAME_2 + ")";
 
   private static int COL_COUNT = 9;
-  private static ColumnCountGetFilter CCG_FILTER =
-      new ColumnCountGetFilter(COL_COUNT);
-  private static String STR_CCG_FILTER =
-      CCG_FILTER.getClass().getSimpleName() + " " + COL_COUNT;
+  private static ColumnCountGetFilter CCG_FILTER = new ColumnCountGetFilter(COL_COUNT);
+  private static String STR_CCG_FILTER = CCG_FILTER.getClass().getSimpleName() + " " + COL_COUNT;
 
   private static int LIMIT = 3;
   private static int OFFSET = 4;
-  private static ColumnPaginationFilter CP_FILTER =
-      new ColumnPaginationFilter(LIMIT, OFFSET);
+  private static ColumnPaginationFilter CP_FILTER = new ColumnPaginationFilter(LIMIT, OFFSET);
   private static String STR_CP_FILTER = CP_FILTER.getClass().getSimpleName()
       + " (" + LIMIT + ", " + OFFSET + ")";
 
   private static String STOP_ROW_KEY = "stop";
   private static InclusiveStopFilter IS_FILTER =
-      new InclusiveStopFilter(STOP_ROW_KEY.getBytes(StandardCharsets.UTF_8));
+      new InclusiveStopFilter(Bytes.toBytes(STOP_ROW_KEY));
   private static String STR_IS_FILTER =
       IS_FILTER.getClass().getSimpleName() + " " + STOP_ROW_KEY;
 
   private static String PREFIX = "prefix";
-  private static PrefixFilter PREFIX_FILTER =
-      new PrefixFilter(PREFIX.getBytes(StandardCharsets.UTF_8));
+  private static PrefixFilter PREFIX_FILTER = new PrefixFilter(Bytes.toBytes(PREFIX));
   private static String STR_PREFIX_FILTER = "PrefixFilter " + PREFIX;
 
-  private static byte[][] PREFIXES = {
-      "0".getBytes(StandardCharsets.UTF_8), 
"1".getBytes(StandardCharsets.UTF_8),
-      "2".getBytes(StandardCharsets.UTF_8)};
-  private static MultipleColumnPrefixFilter MCP_FILTER =
-      new MultipleColumnPrefixFilter(PREFIXES);
+  private static byte[][] PREFIXES = { Bytes.toBytes("0"), Bytes.toBytes("1"), 
Bytes.toBytes("2") };
+  private static MultipleColumnPrefixFilter MCP_FILTER = new 
MultipleColumnPrefixFilter(PREFIXES);
   private static String STR_MCP_FILTER =
       MCP_FILTER.getClass().getSimpleName() + " (3/3): [0, 1, 2]";
 
   private static byte[][] L_PREFIXES = {
-    "0".getBytes(StandardCharsets.UTF_8), "1".getBytes(StandardCharsets.UTF_8),
-    "2".getBytes(StandardCharsets.UTF_8), "3".getBytes(StandardCharsets.UTF_8),
-    "4".getBytes(StandardCharsets.UTF_8), "5".getBytes(StandardCharsets.UTF_8),
-    "6".getBytes(StandardCharsets.UTF_8), 
"7".getBytes(StandardCharsets.UTF_8)};
+    Bytes.toBytes("0"), Bytes.toBytes("1"), Bytes.toBytes("2"), 
Bytes.toBytes("3"),
+    Bytes.toBytes("4"), Bytes.toBytes("5"), Bytes.toBytes("6"), 
Bytes.toBytes("7") };
   private static MultipleColumnPrefixFilter L_MCP_FILTER =
       new MultipleColumnPrefixFilter(L_PREFIXES);
   private static String STR_L_MCP_FILTER =
@@ -148,29 +134,25 @@ public class TestOperation {
 
   private static int PAGE_SIZE = 9;
   private static PageFilter PAGE_FILTER = new PageFilter(PAGE_SIZE);
-  private static String STR_PAGE_FILTER =
-      PAGE_FILTER.getClass().getSimpleName() + " " + PAGE_SIZE;
+  private static String STR_PAGE_FILTER = PAGE_FILTER.getClass().getSimpleName() + " " + PAGE_SIZE;
 
   private static SkipFilter SKIP_FILTER = new SkipFilter(L_TS_FILTER);
   private static String STR_SKIP_FILTER =
       SKIP_FILTER.getClass().getSimpleName() + " " + STR_L_TS_FILTER;
 
-  private static WhileMatchFilter WHILE_FILTER =
-      new WhileMatchFilter(L_TS_FILTER);
+  private static WhileMatchFilter WHILE_FILTER = new WhileMatchFilter(L_TS_FILTER);
   private static String STR_WHILE_FILTER =
       WHILE_FILTER.getClass().getSimpleName() + " " + STR_L_TS_FILTER;
 
   private static KeyOnlyFilter KEY_ONLY_FILTER = new KeyOnlyFilter();
-  private static String STR_KEY_ONLY_FILTER =
-      KEY_ONLY_FILTER.getClass().getSimpleName();
+  private static String STR_KEY_ONLY_FILTER = KEY_ONLY_FILTER.getClass().getSimpleName();
 
-  private static FirstKeyOnlyFilter FIRST_KEY_ONLY_FILTER =
-      new FirstKeyOnlyFilter();
+  private static FirstKeyOnlyFilter FIRST_KEY_ONLY_FILTER = new FirstKeyOnlyFilter();
   private static String STR_FIRST_KEY_ONLY_FILTER =
       FIRST_KEY_ONLY_FILTER.getClass().getSimpleName();
 
   private static CompareOp CMP_OP = CompareOp.EQUAL;
-  private static byte[] CMP_VALUE = "value".getBytes(StandardCharsets.UTF_8);
+  private static byte[] CMP_VALUE = Bytes.toBytes("value");
   private static BinaryComparator BC = new BinaryComparator(CMP_VALUE);
   private static DependentColumnFilter DC_FILTER =
       new DependentColumnFilter(FAMILY, QUALIFIER, true, CMP_OP, BC);
@@ -183,14 +165,12 @@ public class TestOperation {
   private static String STR_FAMILY_FILTER =
       FAMILY_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
 
-  private static QualifierFilter QUALIFIER_FILTER =
-      new QualifierFilter(CMP_OP, BC);
+  private static QualifierFilter QUALIFIER_FILTER = new QualifierFilter(CMP_OP, BC);
   private static String STR_QUALIFIER_FILTER =
       QUALIFIER_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
 
   private static RowFilter ROW_FILTER = new RowFilter(CMP_OP, BC);
-  private static String STR_ROW_FILTER =
-      ROW_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
+  private static String STR_ROW_FILTER = ROW_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
 
   private static ValueFilter VALUE_FILTER = new ValueFilter(CMP_OP, BC);
   private static String STR_VALUE_FILTER =
@@ -207,19 +187,16 @@ public class TestOperation {
       new SingleColumnValueExcludeFilter(FAMILY, QUALIFIER, CMP_OP, CMP_VALUE);
   private static String STR_SCVE_FILTER = String.format("%s (%s, %s, %s, %s)",
       SCVE_FILTER.getClass().getSimpleName(), Bytes.toStringBinary(FAMILY),
-      Bytes.toStringBinary(QUALIFIER), CMP_OP.name(),
-      Bytes.toStringBinary(CMP_VALUE));
+      Bytes.toStringBinary(QUALIFIER), CMP_OP.name(), Bytes.toStringBinary(CMP_VALUE));
 
   private static FilterList AND_FILTER_LIST = new FilterList(
-      Operator.MUST_PASS_ALL, Arrays.asList((Filter) TS_FILTER, L_TS_FILTER,
-          CR_FILTER));
+      Operator.MUST_PASS_ALL, Arrays.asList((Filter) TS_FILTER, L_TS_FILTER, CR_FILTER));
   private static String STR_AND_FILTER_LIST = String.format(
       "%s AND (3/3): [%s, %s, %s]", AND_FILTER_LIST.getClass().getSimpleName(),
       STR_TS_FILTER, STR_L_TS_FILTER, STR_CR_FILTER);
 
   private static FilterList OR_FILTER_LIST = new FilterList(
-      Operator.MUST_PASS_ONE, Arrays.asList((Filter) TS_FILTER, L_TS_FILTER,
-          CR_FILTER));
+      Operator.MUST_PASS_ONE, Arrays.asList((Filter) TS_FILTER, L_TS_FILTER, CR_FILTER));
   private static String STR_OR_FILTER_LIST = String.format(
       "%s OR (3/3): [%s, %s, %s]", AND_FILTER_LIST.getClass().getSimpleName(),
       STR_TS_FILTER, STR_L_TS_FILTER, STR_CR_FILTER);

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
index f66e47c..7229ee1 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
@@ -32,7 +32,6 @@ import com.google.common.base.Strings;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.nio.charset.StandardCharsets;
 
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
@@ -48,6 +47,7 @@ import javax.security.sasl.SaslClient;
 import org.apache.hadoop.hbase.security.AbstractHBaseSaslRpcClient.SaslClientCallbackHandler;
 import org.apache.hadoop.hbase.testclassification.SecurityTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.security.token.Token;
@@ -101,18 +101,15 @@ public class TestHBaseSaslRpcClient {
   @Test
   public void testSaslClientCallbackHandler() throws UnsupportedCallbackException {
     final Token<? extends TokenIdentifier> token = createTokenMock();
-    when(token.getIdentifier())
-      .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8));
-    when(token.getPassword())
-      .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8));
+    when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));
+    when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));
 
     final NameCallback nameCallback = mock(NameCallback.class);
     final PasswordCallback passwordCallback = mock(PasswordCallback.class);
     final RealmCallback realmCallback = mock(RealmCallback.class);
     final RealmChoiceCallback realmChoiceCallback = mock(RealmChoiceCallback.class);
 
-    Callback[] callbackArray = {nameCallback, passwordCallback,
-        realmCallback, realmChoiceCallback};
+    Callback[] callbackArray = {nameCallback, passwordCallback, realmCallback, realmChoiceCallback};
     final SaslClientCallbackHandler saslClCallbackHandler = new SaslClientCallbackHandler(token);
     saslClCallbackHandler.handle(callbackArray);
     verify(nameCallback).setName(anyString());
@@ -123,10 +120,8 @@ public class TestHBaseSaslRpcClient {
   @Test
   public void testSaslClientCallbackHandlerWithException() {
     final Token<? extends TokenIdentifier> token = createTokenMock();
-    when(token.getIdentifier())
-      .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8));
-    when(token.getPassword())
-      .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8));
+    when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));
+    when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));
     final SaslClientCallbackHandler saslClCallbackHandler = new SaslClientCallbackHandler(token);
     try {
       saslClCallbackHandler.handle(new Callback[] { mock(TextOutputCallback.class) });
@@ -296,10 +291,8 @@ public class TestHBaseSaslRpcClient {
       throws IOException {
     Token<? extends TokenIdentifier> token = createTokenMock();
     if (!Strings.isNullOrEmpty(principal) && !Strings.isNullOrEmpty(password)) {
-      when(token.getIdentifier())
-        .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8));
-      when(token.getPassword())
-        .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8));
+      when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));
+      when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));
     }
     return token;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
index 00c05cd..9fd84e6 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
@@ -19,11 +19,6 @@
 
 package org.apache.hadoop.hbase.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
-
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.ByteArrayInputStream;
@@ -44,6 +39,11 @@ import java.nio.charset.StandardCharsets;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+
 /**
  * Encodes and decodes to and from Base64 notation.
  *
@@ -929,7 +929,7 @@ public class Base64 {
       bytes = s.getBytes(PREFERRED_ENCODING);
 
     } catch (UnsupportedEncodingException uee) {
-      bytes = s.getBytes(StandardCharsets.UTF_8);
+      bytes = Bytes.toBytes(s);
     } // end catch
 
     // Decode
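
For context, only the catch block changes in the hunk above: when the
preferred encoding is unavailable, the fallback now routes through
Bytes.toBytes(s), which always encodes UTF-8, rather than calling
s.getBytes(StandardCharsets.UTF_8) directly. A condensed sketch of that
fallback shape, with names taken from the surrounding method:

    byte[] bytes;
    try {
      bytes = s.getBytes(PREFERRED_ENCODING);
    } catch (UnsupportedEncodingException uee) {
      bytes = Bytes.toBytes(s); // UTF-8, same bytes as the old fallback
    }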

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
index 7080078..bdda494 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
@@ -18,10 +18,9 @@
 
 package org.apache.hadoop.hbase;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.NavigableMap;
@@ -332,8 +331,8 @@ public class TestCellUtil {
   @Test
   public void testFindCommonPrefixInFlatKey() {
     // The whole key matching case
-    KeyValue kv1 = new KeyValue("r1".getBytes(StandardCharsets.UTF_8),
-      "f1".getBytes(StandardCharsets.UTF_8), 
"q1".getBytes(StandardCharsets.UTF_8), null);
+    KeyValue kv1 = new KeyValue(Bytes.toBytes("r1"), Bytes.toBytes("f1"),
+        Bytes.toBytes("q1"), null);
     Assert.assertEquals(kv1.getKeyLength(),
         CellUtil.findCommonPrefixInFlatKey(kv1, kv1, true, true));
     Assert.assertEquals(kv1.getKeyLength(),
@@ -341,35 +340,35 @@ public class TestCellUtil {
     Assert.assertEquals(kv1.getKeyLength() - KeyValue.TIMESTAMP_TYPE_SIZE,
         CellUtil.findCommonPrefixInFlatKey(kv1, kv1, true, false));
     // The rk length itself mismatch
-    KeyValue kv2 = new KeyValue("r12".getBytes(StandardCharsets.UTF_8),
-      "f1".getBytes(StandardCharsets.UTF_8), 
"q1".getBytes(StandardCharsets.UTF_8), null);
+    KeyValue kv2 = new KeyValue(Bytes.toBytes("r12"), Bytes.toBytes("f1"),
+        Bytes.toBytes("q1"), null);
     Assert.assertEquals(1, CellUtil.findCommonPrefixInFlatKey(kv1, kv2, true, true));
     // part of rk is same
-    KeyValue kv3 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8),
-      "f1".getBytes(StandardCharsets.UTF_8), 
"q1".getBytes(StandardCharsets.UTF_8), null);
-    Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + 
"r1".getBytes(StandardCharsets.UTF_8).length,
+    KeyValue kv3 = new KeyValue(Bytes.toBytes("r14"), Bytes.toBytes("f1"),
+        Bytes.toBytes("q1"), null);
+    Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + Bytes.toBytes("r1").length,
         CellUtil.findCommonPrefixInFlatKey(kv2, kv3, true, true));
     // entire rk is same but different cf name
-    KeyValue kv4 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8),
-      "f2".getBytes(StandardCharsets.UTF_8), 
"q1".getBytes(StandardCharsets.UTF_8), null);
+    KeyValue kv4 = new KeyValue(Bytes.toBytes("r14"), Bytes.toBytes("f2"),
+        Bytes.toBytes("q1"), null);
     Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv3.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
-        + "f".getBytes(StandardCharsets.UTF_8).length,
+        + Bytes.toBytes("f").length,
         CellUtil.findCommonPrefixInFlatKey(kv3, kv4, false, true));
     // rk and family are same and part of qualifier
-    KeyValue kv5 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8),
-      "f2".getBytes(StandardCharsets.UTF_8), 
"q123".getBytes(StandardCharsets.UTF_8), null);
+    KeyValue kv5 = new KeyValue(Bytes.toBytes("r14"), Bytes.toBytes("f2"),
+        Bytes.toBytes("q123"), null);
     Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv3.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
         + kv4.getFamilyLength() + kv4.getQualifierLength(),
         CellUtil.findCommonPrefixInFlatKey(kv4, kv5, true, true));
     // rk, cf and q are same. ts differs
-    KeyValue kv6 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1234L);
-    KeyValue kv7 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1235L);
+    KeyValue kv6 = new KeyValue(Bytes.toBytes("rk"), 1234L);
+    KeyValue kv7 = new KeyValue(Bytes.toBytes("rk"), 1235L);
     // only last byte out of 8 ts bytes in ts part differs
     Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv6.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
         + kv6.getFamilyLength() + kv6.getQualifierLength() + 7,
         CellUtil.findCommonPrefixInFlatKey(kv6, kv7, true, true));
     // rk, cf, q and ts are same. Only type differs
-    KeyValue kv8 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1234L, Type.Delete);
+    KeyValue kv8 = new KeyValue(Bytes.toBytes("rk"), 1234L, Type.Delete);
     Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv6.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
         + kv6.getFamilyLength() + kv6.getQualifierLength() + KeyValue.TIMESTAMP_SIZE,
         CellUtil.findCommonPrefixInFlatKey(kv6, kv8, true, true));

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
index bc6edb8..6b0ff9c 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
@@ -22,7 +22,6 @@ import static org.junit.Assert.assertNotNull;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.net.URLEncoder;
-import java.nio.charset.StandardCharsets;
 import java.security.Key;
 import java.security.KeyStore;
 import java.security.MessageDigest;
@@ -34,6 +33,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -52,7 +52,7 @@ public class TestKeyStoreKeyProvider {
 
   @BeforeClass
   public static void setUp() throws Exception {
-    KEY = MessageDigest.getInstance("SHA-256").digest(ALIAS.getBytes(StandardCharsets.UTF_8));
+    KEY = MessageDigest.getInstance("SHA-256").digest(Bytes.toBytes(ALIAS));
     // Create a JKECS store containing a test secret key
     KeyStore store = KeyStore.getInstance("JCEKS");
     store.load(null, PASSWORD.toCharArray());

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java
index f02087c..09ba655 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java
@@ -21,7 +21,6 @@ import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 
 import java.lang.reflect.Constructor;
-import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Comparator;
@@ -71,16 +70,11 @@ public class TestStruct {
     };
 
     Object[][] pojo2Args = {
-        new Object[] { new byte[0], "it".getBytes(StandardCharsets.UTF_8), "was",
-          "the".getBytes(StandardCharsets.UTF_8) },
-        new Object[] { "best".getBytes(StandardCharsets.UTF_8), new byte[0], "of",
-          "times,".getBytes(StandardCharsets.UTF_8) },
-        new Object[] { "it".getBytes(StandardCharsets.UTF_8),
-          "was".getBytes(StandardCharsets.UTF_8), "",
-          "the".getBytes(StandardCharsets.UTF_8) },
-        new Object[] { "worst".getBytes(StandardCharsets.UTF_8),
-          "of".getBytes(StandardCharsets.UTF_8), "times,", new byte[0] },
-        new Object[] { new byte[0], new byte[0], "", new byte[0] },
+      new Object[] { new byte[0], Bytes.toBytes("it"), "was", Bytes.toBytes("the") },
+      new Object[] { Bytes.toBytes("best"), new byte[0], "of", Bytes.toBytes("times,") },
+      new Object[] { Bytes.toBytes("it"), Bytes.toBytes("was"), "", Bytes.toBytes("the") },
+      new Object[] { Bytes.toBytes("worst"), Bytes.toBytes("of"), "times,", new byte[0] },
+      new Object[] { new byte[0], new byte[0], "", new byte[0] },
     };
 
     Object[][] params = new Object[][] {

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
index 3767e87..4ff800c 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
@@ -19,16 +19,14 @@ package org.apache.hadoop.hbase.util;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import java.nio.charset.StandardCharsets;
 import java.util.HashSet;
 import java.util.Random;
 import java.util.Set;
 
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-
 @Category(SmallTests.class)
 public class TestLoadTestKVGenerator {
 
@@ -41,8 +39,8 @@ public class TestLoadTestKVGenerator {
   @Test
   public void testValueLength() {
     for (int i = 0; i < 1000; ++i) {
-      byte[] v = gen.generateRandomSizeValue(Integer.toString(i).getBytes(StandardCharsets.UTF_8),
-          String.valueOf(rand.nextInt()).getBytes(StandardCharsets.UTF_8));
+      byte[] v = gen.generateRandomSizeValue(Bytes.toBytes(Integer.toString(i)),
+          Bytes.toBytes(String.valueOf(rand.nextInt())));
       assertTrue(MIN_LEN <= v.length);
       assertTrue(v.length <= MAX_LEN);
     }
@@ -52,8 +50,8 @@ public class TestLoadTestKVGenerator {
   public void testVerification() {
     for (int i = 0; i < 1000; ++i) {
       for (int qualIndex = 0; qualIndex < 20; ++qualIndex) {
-        byte[] qual = String.valueOf(qualIndex).getBytes(StandardCharsets.UTF_8);
-        byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes(StandardCharsets.UTF_8);
+        byte[] qual = Bytes.toBytes(String.valueOf(qualIndex));
+        byte[] rowKey = Bytes.toBytes(LoadTestKVGenerator.md5PrefixedKey(i));
         byte[] v = gen.generateRandomSizeValue(rowKey, qual);
         assertTrue(LoadTestKVGenerator.verify(v, rowKey, qual));
         v[0]++;

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java
index c5d661f..90636a8 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java
@@ -22,7 +22,6 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 
 import java.math.BigDecimal;
-import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collections;
 
@@ -872,32 +871,32 @@ public class TestOrderedBytes {
   @Test
   public void testBlobVar() {
     byte[][] vals =
-        { "".getBytes(StandardCharsets.UTF_8),
-          "foo".getBytes(StandardCharsets.UTF_8),
-          "foobarbazbub".getBytes(StandardCharsets.UTF_8),
-          { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, 
(byte) 0xaa,
-            (byte) 0xaa, /* 7 bytes of alternating bits; testing around 
HBASE-9893 */ },
-          { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, 
(byte) 0xaa,
-            (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, 
(byte) 0xaa },
-          { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, 
(byte) 0xaa,
-            (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, 
(byte) 0xaa,
-            (byte) 0xaa, (byte) 0xaa, /* 14 bytes of alternating bits; testing 
around HBASE-9893 */ },
-          { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, 
(byte) 0x55,
-            (byte) 0x55, /* 7 bytes of alternating bits; testing around 
HBASE-9893 */ },
-          { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, 
(byte) 0x55,
-            (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, 
(byte) 0x55 },
-          { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, 
(byte) 0x55,
-            (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, 
(byte) 0x55,
-            (byte) 0x55, (byte) 0x55, /* 14 bytes of alternating bits; testing 
around HBASE-9893 */ },
-          "1".getBytes(StandardCharsets.UTF_8),
-          "22".getBytes(StandardCharsets.UTF_8),
-          "333".getBytes(StandardCharsets.UTF_8),
-          "4444".getBytes(StandardCharsets.UTF_8),
-          "55555".getBytes(StandardCharsets.UTF_8),
-          "666666".getBytes(StandardCharsets.UTF_8),
-          "7777777".getBytes(StandardCharsets.UTF_8),
-          "88888888".getBytes(StandardCharsets.UTF_8)
-        };
+      { Bytes.toBytes(""),
+        Bytes.toBytes("foo"),
+        Bytes.toBytes("foobarbazbub"),
+        { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, 
(byte) 0xaa,
+          (byte) 0xaa, /* 7 bytes of alternating bits; testing around 
HBASE-9893 */ },
+        { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, 
(byte) 0xaa,
+          (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, 
(byte) 0xaa },
+        { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, 
(byte) 0xaa,
+          (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, 
(byte) 0xaa,
+          (byte) 0xaa, (byte) 0xaa, /* 14 bytes of alternating bits; testing 
around HBASE-9893 */ },
+        { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, 
(byte) 0x55,
+          (byte) 0x55, /* 7 bytes of alternating bits; testing around 
HBASE-9893 */ },
+        { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, 
(byte) 0x55,
+          (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, 
(byte) 0x55 },
+        { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, 
(byte) 0x55,
+          (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, 
(byte) 0x55,
+          (byte) 0x55, (byte) 0x55, /* 14 bytes of alternating bits; testing 
around HBASE-9893 */ },
+        Bytes.toBytes("1"),
+        Bytes.toBytes("22"),
+        Bytes.toBytes("333"),
+        Bytes.toBytes("4444"),
+        Bytes.toBytes("55555"),
+        Bytes.toBytes("666666"),
+        Bytes.toBytes("7777777"),
+        Bytes.toBytes("88888888")
+      };
 
     /*
      * assert encoded values match decoded values. encode into target buffer
@@ -967,9 +966,9 @@ public class TestOrderedBytes {
   @Test
   public void testBlobCopy() {
     byte[][] vals =
-      { "".getBytes(StandardCharsets.UTF_8),
-        "foo".getBytes(StandardCharsets.UTF_8),
-        "foobarbazbub".getBytes(StandardCharsets.UTF_8),
+      { Bytes.toBytes(""),
+        Bytes.toBytes("foo"),
+        Bytes.toBytes("foobarbazbub"),
        { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa,
          (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa },
        { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55,
@@ -1044,9 +1043,9 @@ public class TestOrderedBytes {
       byte[] a = new byte[3 + (Order.ASCENDING == ord ? 1 : 2) + 2];
       PositionedByteRange buf =
          new SimplePositionedMutableByteRange(a, 1, 3 + (Order.ASCENDING == ord ? 1 : 2));
-      OrderedBytes.encodeBlobCopy(buf, "foobarbaz".getBytes(StandardCharsets.UTF_8), 3, 3, ord);
+      OrderedBytes.encodeBlobCopy(buf, Bytes.toBytes("foobarbaz"), 3, 3, ord);
       buf.setPosition(0);
-      assertArrayEquals("bar".getBytes(StandardCharsets.UTF_8), OrderedBytes.decodeBlobCopy(buf));
+      assertArrayEquals(Bytes.toBytes("bar"), OrderedBytes.decodeBlobCopy(buf));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java
index 5a9344a..33130e4 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java
@@ -25,7 +25,6 @@ import static org.junit.Assert.assertTrue;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
-import java.nio.charset.StandardCharsets;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
 
@@ -33,7 +32,6 @@ import org.apache.commons.httpclient.Header;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Get;
@@ -42,6 +40,7 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.rest.client.Client;
 import org.apache.hadoop.hbase.rest.client.Cluster;
 import org.apache.hadoop.hbase.rest.client.Response;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -147,7 +146,7 @@ public class TestGzipFilter {
     headers[1] = new Header("Accept", Constants.MIMETYPE_JSON);
     headers[2] = new Header("Accept-Encoding", "gzip");
     Response response = client.post("/" + TABLE + "/scanner", headers,
-        "<Scanner/>".getBytes(StandardCharsets.UTF_8));
+        Bytes.toBytes("<Scanner/>"));
     assertEquals(201, response.getCode());
     String scannerUrl = response.getLocation();
     assertNotNull(scannerUrl);

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java
index c12bbb6..ac473e3 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java
@@ -23,7 +23,6 @@ import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
 import java.io.IOException;
-import java.nio.charset.StandardCharsets;
 
 import javax.xml.bind.UnmarshalException;
 
@@ -33,6 +32,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.rest.Constants;
 import org.apache.hadoop.hbase.rest.model.StorageClusterVersionModel;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -50,7 +50,7 @@ public class TestXmlParsing {
         + "<ClusterVersion Version=\"2.0.0\"/>";
     Client client = mock(Client.class);
     RemoteAdmin admin = new RemoteAdmin(client, HBaseConfiguration.create(), null);
-    Response resp = new Response(200, null, xml.getBytes(StandardCharsets.UTF_8));
+    Response resp = new Response(200, null, Bytes.toBytes(xml));
 
     when(client.get("/version/cluster", 
Constants.MIMETYPE_XML)).thenReturn(resp);
 
@@ -66,7 +66,7 @@ public class TestXmlParsing {
         + " <ClusterVersion>&xee;</ClusterVersion>";
     Client client = mock(Client.class);
     RemoteAdmin admin = new RemoteAdmin(client, HBaseConfiguration.create(), null);
-    Response resp = new Response(200, null, externalEntitiesXml.getBytes(StandardCharsets.UTF_8));
+    Response resp = new Response(200, null, Bytes.toBytes(externalEntitiesXml));
 
     when(client.get("/version/cluster", 
Constants.MIMETYPE_XML)).thenReturn(resp);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index fc4aee3..492b5d1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -17,12 +17,13 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
+import com.google.common.annotations.VisibleForTesting;
+
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.net.InetSocketAddress;
 import java.net.URLDecoder;
 import java.net.URLEncoder;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
@@ -33,8 +34,6 @@ import java.util.UUID;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -47,6 +46,8 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.HTable;
@@ -79,8 +80,6 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
 
-import com.google.common.annotations.VisibleForTesting;
-
 /**
  * Writes HFiles. Passed Cells must arrive in order.
  * Writes current time as the sequence id for the file. Sets the major compacted
@@ -647,8 +646,7 @@ public class HFileOutputFormat2
         continue;
       }
       try {
-        confValMap.put(URLDecoder.decode(familySplit[0], "UTF-8")
-              .getBytes(StandardCharsets.UTF_8),
+        confValMap.put(Bytes.toBytes(URLDecoder.decode(familySplit[0], "UTF-8")),
             URLDecoder.decode(familySplit[1], "UTF-8"));
       } catch (UnsupportedEncodingException e) {
         // will not happen with UTF-8 encoding
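
The map built here is keyed by raw family bytes decoded from URL-encoded
"family=value" pairs, so an '=' or '&' inside a name or value stays
unambiguous. A hedged round-trip sketch of that encoding (class name and
values hypothetical):

    import java.net.URLDecoder;
    import java.net.URLEncoder;

    import org.apache.hadoop.hbase.util.Bytes;

    public class FamilyConfRoundTripSketch {
      public static void main(String[] args) throws Exception {
        // Encode one pair, then decode it the way the method above does.
        String pair = URLEncoder.encode("cf1", "UTF-8") + "="
            + URLEncoder.encode("SNAPPY", "UTF-8");
        String[] familySplit = pair.split("=");
        byte[] family = Bytes.toBytes(URLDecoder.decode(familySplit[0], "UTF-8"));
        String value = URLDecoder.decode(familySplit[1], "UTF-8");
        System.out.println(Bytes.toString(family) + " -> " + value); // cf1 -> SNAPPY
      }
    }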

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 92e8ef7..faab525 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -1016,7 +1016,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
           this.stores.put(store.getFamily().getName(), store);
 
           long storeMaxSequenceId = store.getMaxSequenceId();
-          maxSeqIdInStores.put(store.getColumnFamilyName().getBytes(StandardCharsets.UTF_8),
+          maxSeqIdInStores.put(Bytes.toBytes(store.getColumnFamilyName()),
               storeMaxSequenceId);
           if (maxSeqId == -1 || storeMaxSequenceId > maxSeqId) {
             maxSeqId = storeMaxSequenceId;
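
Note that maxSeqIdInStores is keyed by byte[], whose equals/hashCode are
identity-based; content lookups only work because the map orders keys with a
byte comparator. A small sketch, assuming Bytes.BYTES_COMPARATOR as used
elsewhere in HBase (class name hypothetical):

    import java.util.TreeMap;

    import org.apache.hadoop.hbase.util.Bytes;

    public class ByteArrayKeyedMapSketch {
      public static void main(String[] args) {
        TreeMap<byte[], Long> maxSeqIds =
            new TreeMap<byte[], Long>(Bytes.BYTES_COMPARATOR);
        maxSeqIds.put(Bytes.toBytes("cf1"), 42L);
        // A fresh array with equal content still hits, thanks to the comparator.
        System.out.println(maxSeqIds.get(Bytes.toBytes("cf1"))); // 42
      }
    }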

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 1cb2e46..37ea94b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -18,6 +18,17 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
+import com.google.protobuf.BlockingRpcChannel;
+import com.google.protobuf.Descriptors;
+import com.google.protobuf.Message;
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
+import com.google.protobuf.ServiceException;
+
 import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.lang.Thread.UncaughtExceptionHandler;
@@ -26,7 +37,6 @@ import java.lang.reflect.Constructor;
 import java.net.BindException;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
-import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -189,18 +199,6 @@ import org.apache.hadoop.util.StringUtils;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.KeeperException.NoNodeException;
 import org.apache.zookeeper.data.Stat;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Maps;
-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
-
 import sun.misc.Signal;
 import sun.misc.SignalHandler;
 
@@ -2958,14 +2956,12 @@ public class HRegionServer extends HasThread implements
     }
 
     final Boolean previous = this.regionsInTransitionInRS
-      .putIfAbsent(encodedName.getBytes(StandardCharsets.UTF_8),
-        Boolean.FALSE);
+      .putIfAbsent(Bytes.toBytes(encodedName), Boolean.FALSE);
 
     if (Boolean.TRUE.equals(previous)) {
      LOG.info("Received CLOSE for the region:" + encodedName + " , which we are already " +
           "trying to OPEN. Cancelling OPENING.");
-      if (!regionsInTransitionInRS.replace(encodedName.getBytes(StandardCharsets.UTF_8),
-            previous, Boolean.FALSE)){
+      if (!regionsInTransitionInRS.replace(Bytes.toBytes(encodedName), previous, Boolean.FALSE)) {
        // The replace failed. That should be an exceptional case, but theoretically it can happen.
         // We're going to try to do a standard close then.
        LOG.warn("The opening for region " + encodedName + " was done before we could cancel it." +
@@ -2995,7 +2991,7 @@ public class HRegionServer extends HasThread implements
 
     if (actualRegion == null) {
      LOG.error("Received CLOSE for a region which is not online, and we're not opening.");
-      this.regionsInTransitionInRS.remove(encodedName.getBytes(StandardCharsets.UTF_8));
+      this.regionsInTransitionInRS.remove(Bytes.toBytes(encodedName));
       // The master deletes the znode when it receives this exception.
       throw new NotServingRegionException("The region " + encodedName +
           " is not online, and is not opening.");

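The putIfAbsent/replace pair above is a compare-and-swap on the
region-in-transition map. The sketch below assumes a ConcurrentSkipListMap
keyed with Bytes.BYTES_COMPARATOR, since byte[] keys need content-based
ordering; the class name and encoded region name are hypothetical:

    import java.util.concurrent.ConcurrentSkipListMap;

    import org.apache.hadoop.hbase.util.Bytes;

    public class RegionTransitionCasSketch {
      public static void main(String[] args) {
        ConcurrentSkipListMap<byte[], Boolean> rit =
            new ConcurrentSkipListMap<byte[], Boolean>(Bytes.BYTES_COMPARATOR);
        byte[] key = Bytes.toBytes("0123abcd");   // hypothetical encoded name
        rit.put(key, Boolean.TRUE);               // TRUE marks an in-flight OPEN
        // Atomically flip OPEN -> CLOSE; fails if another thread raced us.
        boolean cancelled = rit.replace(key, Boolean.TRUE, Boolean.FALSE);
        System.out.println(cancelled);            // prints true
      }
    }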
http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java
index 4fbce90..f6581f3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java
@@ -18,11 +18,13 @@
  */
 package org.apache.hadoop.hbase.zookeeper;
 
+import com.google.common.annotations.VisibleForTesting;
+
 import java.io.BufferedReader;
 import java.io.File;
-import java.io.InterruptedIOException;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.io.InterruptedIOException;
 import java.io.OutputStream;
 import java.io.Reader;
 import java.net.BindException;
@@ -35,16 +37,15 @@ import java.util.Random;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.zookeeper.server.NIOServerCnxnFactory;
 import org.apache.zookeeper.server.ZooKeeperServer;
 import org.apache.zookeeper.server.persistence.FileTxnLog;
 
-import com.google.common.annotations.VisibleForTesting;
-
 /**
  * TODO: Most of the code in this class is ripped from ZooKeeper tests. Instead
  * of redoing it, we should contribute updates to their code which let us more
@@ -406,7 +407,7 @@ public class MiniZooKeeperCluster {
         Socket sock = new Socket("localhost", port);
         try {
           OutputStream outstream = sock.getOutputStream();
-          outstream.write("stat".getBytes(StandardCharsets.UTF_8));
+          outstream.write(Bytes.toBytes("stat"));
           outstream.flush();
         } finally {
           sock.close();
@@ -436,7 +437,7 @@ public class MiniZooKeeperCluster {
         BufferedReader reader = null;
         try {
           OutputStream outstream = sock.getOutputStream();
-          outstream.write("stat".getBytes(StandardCharsets.UTF_8));
+          outstream.write(Bytes.toBytes("stat"));
           outstream.flush();
 
           Reader isr = new InputStreamReader(sock.getInputStream(), StandardCharsets.UTF_8);
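
Both hunks above issue ZooKeeper's four-letter "stat" command over a raw
socket. A self-contained sketch of the probe, with a hypothetical host and
port and a listening server assumed:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.io.OutputStream;
    import java.net.Socket;
    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.hbase.util.Bytes;

    public class ZkStatProbeSketch {
      public static void main(String[] args) throws Exception {
        Socket sock = new Socket("localhost", 2181);
        try {
          OutputStream outstream = sock.getOutputStream();
          outstream.write(Bytes.toBytes("stat"));   // four-letter admin command
          outstream.flush();
          BufferedReader reader = new BufferedReader(
              new InputStreamReader(sock.getInputStream(), StandardCharsets.UTF_8));
          System.out.println(reader.readLine());    // first line of the reply
        } finally {
          sock.close();
        }
      }
    }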
