This is an automated email from the ASF dual-hosted git repository.

janh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/master by this push:
     new 12bcb87  HBASE-22199 Replaced UTF-8 String with StandardCharsets.UTF_8
12bcb87 is described below

commit 12bcb879da45c8ed2ff7011409914f4d12bca6af
Author: Jan Hentschel <[email protected]>
AuthorDate: Wed Apr 10 13:59:41 2019 +0200

    HBASE-22199 Replaced UTF-8 String with StandardCharsets.UTF_8
---
 .../main/java/org/apache/hadoop/hbase/thrift/DemoClient.java   |  9 ++-------
 .../java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java    |  8 +-------
 .../apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java  | 10 +++++-----
 .../hadoop/hbase/mapreduce/TsvImporterCustomTestMapper.java    |  3 ++-
 .../java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java |  3 ++-
 .../test/java/org/apache/hadoop/hbase/rest/TestTableScan.java  |  8 ++++----
 .../hadoop/hbase/security/token/TestAuthenticationKey.java     |  5 +++--
 7 files changed, 19 insertions(+), 27 deletions(-)
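
Every hunk below applies the same pattern: replace the String-named charset
lookup s.getBytes("UTF-8"), which forces callers to handle a checked
UnsupportedEncodingException that can never fire (UTF-8 support is mandatory
on every JVM), with an overload taking a java.nio.charset.Charset constant.
A minimal before/after sketch of the pattern (class and method names here
are illustrative, not part of the commit):

    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    public class Utf8BytesSketch {
      // Before: the String charset name forces a catch block that is
      // unreachable in practice, since every JVM must support UTF-8.
      static byte[] bytesOld(String s) {
        try {
          return s.getBytes("UTF-8");
        } catch (UnsupportedEncodingException e) {
          throw new AssertionError("UTF-8 is always supported", e);
        }
      }

      // After: String.getBytes(Charset) declares no checked exception.
      static byte[] bytesNew(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
      }

      public static void main(String[] args) {
        System.out.println(java.util.Arrays.equals(
            bytesOld("hbase"), bytesNew("hbase"))); // true
      }
    }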

diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
index c161cba..c99d04a 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
@@ -18,7 +18,6 @@
  */
 package org.apache.hadoop.hbase.thrift;
 
-import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
 import java.nio.charset.Charset;
@@ -42,6 +41,7 @@ import org.apache.hadoop.hbase.thrift.generated.Hbase;
 import org.apache.hadoop.hbase.thrift.generated.Mutation;
 import org.apache.hadoop.hbase.thrift.generated.TCell;
 import org.apache.hadoop.hbase.thrift.generated.TRowResult;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.transport.TSaslClientTransport;
@@ -115,12 +115,7 @@ public class DemoClient {
 
   // Helper to translate strings to UTF8 bytes
   private byte[] bytes(String s) {
-    try {
-      return s.getBytes("UTF-8");
-    } catch (UnsupportedEncodingException e) {
-      LOG.error("CharSetName {} not supported", s, e);
-      return null;
-    }
+    return Bytes.toBytes(s);
   }
 
   private void run() throws Exception {
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
index fd214d1..106d8c6 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
@@ -18,7 +18,6 @@
  */
 package org.apache.hadoop.hbase.thrift;
 
-import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
 import java.nio.charset.Charset;
@@ -113,12 +112,7 @@ public class HttpDoAsClient {
 
   // Helper to translate strings to UTF8 bytes
   private byte[] bytes(String s) {
-    try {
-      return s.getBytes("UTF-8");
-    } catch (UnsupportedEncodingException e) {
-      LOG.error("CharSetName {} not supported", s, e);
-      return null;
-    }
+    return Bytes.toBytes(s);
   }
 
   private void run() throws Exception {
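
In both Thrift example clients the bytes(String) helper now delegates to
HBase's own org.apache.hadoop.hbase.util.Bytes utility, whose toBytes(String)
encodes with UTF-8 internally. The observable result is the same byte array,
but the unreachable catch block (which silently returned null on a failure
that cannot happen) is gone. A rough sketch of what the helper now resolves
to (the real Bytes class does more, e.g. null handling and ByteBuffer
overloads):

    import java.nio.charset.StandardCharsets;

    final class BytesSketch {
      // Same bytes as s.getBytes("UTF-8"), with no checked exception.
      static byte[] toBytes(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
      }
    }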
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index f4f8236..5c0bb2b 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -401,7 +401,7 @@ public class TestHFileOutputFormat2  {
       HFile.Reader rd =
          HFile.createReader(fs, file[0].getPath(), new CacheConfig(conf), true, conf);
       Map<byte[],byte[]> finfo = rd.loadFileInfo();
-      byte[] range = finfo.get("TIMERANGE".getBytes("UTF-8"));
+      byte[] range = finfo.get(Bytes.toBytes("TIMERANGE"));
       assertNotNull(range);
 
       // unmarshall and check values.
@@ -829,7 +829,7 @@ public class TestHFileOutputFormat2  {
       for (Entry<String, Algorithm> entry : familyToCompression.entrySet()) {
         assertEquals("Compression configuration incorrect for column family:"
             + entry.getKey(), entry.getValue(),
-            retrievedFamilyToCompressionMap.get(entry.getKey().getBytes("UTF-8")));
+            retrievedFamilyToCompressionMap.get(Bytes.toBytes(entry.getKey())));
       }
     }
   }
@@ -903,7 +903,7 @@ public class TestHFileOutputFormat2  {
       for (Entry<String, BloomType> entry : familyToBloomType.entrySet()) {
         assertEquals("BloomType configuration incorrect for column family:"
             + entry.getKey(), entry.getValue(),
-            retrievedFamilyToBloomTypeMap.get(entry.getKey().getBytes("UTF-8")));
+            retrievedFamilyToBloomTypeMap.get(Bytes.toBytes(entry.getKey())));
       }
     }
   }
@@ -976,7 +976,7 @@ public class TestHFileOutputFormat2  {
           ) {
         assertEquals("BlockSize configuration incorrect for column family:"
             + entry.getKey(), entry.getValue(),
-            retrievedFamilyToBlockSizeMap.get(entry.getKey().getBytes("UTF-8")));
+            retrievedFamilyToBlockSizeMap.get(Bytes.toBytes(entry.getKey())));
       }
     }
   }
@@ -1053,7 +1053,7 @@ public class TestHFileOutputFormat2  {
       for (Entry<String, DataBlockEncoding> entry : familyToDataBlockEncoding.entrySet()) {
         assertEquals("DataBlockEncoding configuration incorrect for column family:"
             + entry.getKey(), entry.getValue(),
-            retrievedFamilyToDataBlockEncodingMap.get(entry.getKey().getBytes("UTF-8")));
+            retrievedFamilyToDataBlockEncodingMap.get(Bytes.toBytes(entry.getKey())));
       }
     }
   }
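
A side note on why these assertions work at all: the retrieved maps are keyed
by byte[], and byte arrays have identity-based equals/hashCode, so the lookups
rely on the test building its maps with a content-based comparator (HBase
ships Bytes.BYTES_COMPARATOR for this). A small sketch of the distinction,
independent of the test's actual map construction:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;
    import org.apache.hadoop.hbase.util.Bytes;

    class ByteArrayKeySketch {
      public static void main(String[] args) {
        // HashMap compares byte[] keys by reference; a fresh array misses.
        Map<byte[], String> hash = new HashMap<>();
        hash.put(Bytes.toBytes("cf"), "value");
        System.out.println(hash.get(Bytes.toBytes("cf"))); // null

        // A TreeMap ordered by byte content finds it.
        Map<byte[], String> tree = new TreeMap<>(Bytes.BYTES_COMPARATOR);
        tree.put(Bytes.toBytes("cf"), "value");
        System.out.println(tree.get(Bytes.toBytes("cf"))); // value
      }
    }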
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapper.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapper.java
index aea5036..0c73527 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapper.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapper.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
+import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -51,7 +52,7 @@ public class TsvImporterCustomTestMapper extends TsvImporterMapper {
 
     // do some basic line parsing
     byte[] lineBytes = value.getBytes();
-    String[] valueTokens = new String(lineBytes, "UTF-8").split("\u001b");
+    String[] valueTokens = new String(lineBytes, StandardCharsets.UTF_8).split("\u001b");
 
     // create the rowKey and Put
     ImmutableBytesWritable rowKey =
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
index 29b48e1..ef8dfc8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
@@ -24,6 +24,7 @@ import com.google.protobuf.Message;
 import com.google.protobuf.Service;
 import com.google.protobuf.ServiceException;
 
+import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
@@ -953,7 +954,7 @@ public class RemoteHTable implements Table {
    */
   private static String toURLEncodedBytes(byte[] row) {
     try {
-      return URLEncoder.encode(new String(row, "UTF-8"), "UTF-8");
+      return URLEncoder.encode(new String(row, StandardCharsets.UTF_8), "UTF-8");
     } catch (UnsupportedEncodingException e) {
       throw new IllegalStateException("URLEncoder doesn't support UTF-8", e);
     }
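
Note that only the first "UTF-8" literal could be replaced here: before
Java 10, URLEncoder.encode accepts the charset only as a String name, so the
checked UnsupportedEncodingException remains for the second argument. On a
Java 10+ baseline (which this branch does not assume) the try/catch could go
away entirely, as in this sketch:

    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;

    class UrlEncodeSketch {
      // Java 10+: URLEncoder.encode(String, Charset) declares no checked
      // exception, so no try/catch is needed at all.
      static String toURLEncodedBytes(byte[] row) {
        return URLEncoder.encode(new String(row, StandardCharsets.UTF_8),
            StandardCharsets.UTF_8);
      }
    }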
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
index 2bb6157..e048550 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
@@ -594,10 +594,10 @@ public class TestTableScan {
       RowModel rowModel = rowModels.get(i);
       RowModel reversedRowModel = reversedRowModels.get(i);
 
-      assertEquals(new String(rowModel.getKey(), "UTF-8"),
-          new String(reversedRowModel.getKey(), "UTF-8"));
-      assertEquals(new String(rowModel.getCells().get(0).getValue(), "UTF-8"),
-          new String(reversedRowModel.getCells().get(0).getValue(), "UTF-8"));
+      assertEquals(new String(rowModel.getKey(), StandardCharsets.UTF_8),
+          new String(reversedRowModel.getKey(), StandardCharsets.UTF_8));
+      assertEquals(new String(rowModel.getCells().get(0).getValue(), StandardCharsets.UTF_8),
+          new String(reversedRowModel.getCells().get(0).getValue(), StandardCharsets.UTF_8));
     }
   }
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestAuthenticationKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestAuthenticationKey.java
index cdd89e0..094452f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestAuthenticationKey.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestAuthenticationKey.java
@@ -25,6 +25,7 @@ import javax.crypto.SecretKey;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.SecurityTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -40,7 +41,7 @@ public class TestAuthenticationKey {
   @Test
   public void test() throws UnsupportedEncodingException {
     SecretKey secret = Mockito.mock(SecretKey.class);
-    Mockito.when(secret.getEncoded()).thenReturn("secret".getBytes("UTF-8"));
+    Mockito.when(secret.getEncoded()).thenReturn(Bytes.toBytes("secret"));
 
     AuthenticationKey key = new AuthenticationKey(0, 1234, secret);
    assertEquals(key.hashCode(), new AuthenticationKey(0, 1234, secret).hashCode());
@@ -55,7 +56,7 @@ public class TestAuthenticationKey {
     assertNotEquals(key, otherExpiry);
 
     SecretKey other = Mockito.mock(SecretKey.class);
-    Mockito.when(secret.getEncoded()).thenReturn("other".getBytes("UTF-8"));
+    Mockito.when(secret.getEncoded()).thenReturn(Bytes.toBytes("other"));
 
     AuthenticationKey otherSecret = new AuthenticationKey(0, 1234, other);
     assertNotEquals(key.hashCode(), otherSecret.hashCode());
