This is an automated email from the ASF dual-hosted git repository.

nihaljain pushed a commit to branch branch-2.5
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2.5 by this push:
     new d2958c6402e HBASE-29026 Replace some deprecated calls (#6662) (#6585)
d2958c6402e is described below

commit d2958c6402e4014e53001ce21f0c3d21ffdff721
Author: Dávid Paksy <[email protected]>
AuthorDate: Thu Feb 20 10:23:15 2025 +0100

    HBASE-29026 Replace some deprecated calls (#6662) (#6585)
    
    - Replaced the usage of the following deprecated methods:
      - java.net.URLEncoder.encode(String) -> java.net.URLEncoder.encode(String, String)
      - StringUtils.humanReadableInt(long) -> StringUtils.TraditionalBinaryPrefix.long2String(long, "", 1): For this, a new static util method is introduced: org.apache.hadoop.hbase.util.Strings.humanReadableInt
      - org.apache.hadoop.fs.FileSystem.getLength(Path) -> getFileStatus(Path).getLen()
      - org.apache.hadoop.hbase.ServerName.getStartcode() -> org.apache.hadoop.hbase.ServerName.getStartCode()
    - Also removed unused imports in the touched JSP files.
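
    Taken together, the replacements above follow one pattern, sketched below.
    This sketch is illustrative only and is not part of the patch: the
    enclosing class and the fs/path/serverName/bytes parameters are
    hypothetical stand-ins for the real call sites.

        import java.io.IOException;
        import java.net.URLEncoder;
        import java.nio.charset.StandardCharsets;
        import org.apache.hadoop.fs.FileSystem;
        import org.apache.hadoop.fs.Path;
        import org.apache.hadoop.hbase.ServerName;
        import org.apache.hadoop.hbase.util.Strings;

        class DeprecationMigrationSketch {
          static String describe(FileSystem fs, Path path, ServerName serverName, long bytes)
            throws IOException {
            // URLEncoder.encode(String) -> URLEncoder.encode(String, String):
            // the two-arg overload makes the charset explicit instead of platform-default.
            String host = URLEncoder.encode(serverName.getHostname(), StandardCharsets.UTF_8.toString());
            // StringUtils.humanReadableInt(long) -> Strings.humanReadableInt(long),
            // the new HBase wrapper around TraditionalBinaryPrefix.long2String(number, "", 1).
            String size = Strings.humanReadableInt(bytes);
            // FileSystem.getLength(Path) -> getFileStatus(Path).getLen()
            long len = fs.getFileStatus(path).getLen();
            // ServerName.getStartcode() -> ServerName.getStartCode()
            long start = serverName.getStartCode();
            return host + " " + size + " len=" + len + " start=" + start;
          }
        }

    Note that the String-charset overload of URLEncoder.encode still throws the
    checked UnsupportedEncodingException (a subclass of IOException), which is
    why the touched JSP helpers now declare it; the Charset overload that avoids
    the checked exception only arrived in Java 10.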
    
    (cherry picked from commit 2daf846c97db3372678c6ed7b00ac97fcab43d9e)
    (cherry picked from commit 1c93e96fcf298e063fb6c8f396e45c49b7fc714b)
    
    Signed-off-by: Istvan Toth <[email protected]>
    Signed-off-by: Nihal Jain <[email protected]>
---
 .../java/org/apache/hadoop/hbase/util/Strings.java | 12 ++++++
 .../hadoop/hbase/mapreduce/PutSortReducer.java     |  4 +-
 .../hbase/mapreduce/TableInputFormatBase.java      |  3 +-
 .../hadoop/hbase/mapreduce/TextSortReducer.java    |  4 +-
 .../hadoop/hbase/snapshot/ExportSnapshot.java      | 25 ++++++------
 .../hadoop/hbase/ScanPerformanceEvaluation.java    | 46 +++++++++++-----------
 .../hbase/tmpl/master/MasterStatusTmpl.jamon       |  3 +-
 .../hadoop/hbase/io/hfile/HFileBlockIndex.java     |  6 +--
 .../hadoop/hbase/regionserver/wal/MetricsWAL.java  |  4 +-
 .../apache/hadoop/hbase/snapshot/SnapshotInfo.java |  4 +-
 .../resources/hbase-webapps/master/procedures.jsp  | 14 +------
 .../main/resources/hbase-webapps/master/quotas.jsp |  1 -
 .../resources/hbase-webapps/master/rsgroup.jsp     |  2 +-
 .../resources/hbase-webapps/master/snapshot.jsp    |  8 ++--
 .../hbase-webapps/master/snapshotsStats.jsp        | 20 +++++-----
 .../main/resources/hbase-webapps/master/table.jsp  | 22 +++++------
 .../hbase-webapps/master/tablesDetailed.jsp        |  2 -
 .../hbase-webapps/regionserver/region.jsp          |  3 +-
 .../regionserver/TestHRegionReplayEvents.java      |  6 +--
 .../compactions/MockStoreFileGenerator.java        |  4 +-
 .../hadoop/hbase/util/MultiThreadedAction.java     |  3 +-
 21 files changed, 96 insertions(+), 100 deletions(-)

diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java
index cdf5bf63fb5..258a43dc33f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java
@@ -88,4 +88,16 @@ public final class Strings {
     int numPaddingCharacters = length - input.length();
     return StringUtils.repeat(padding, numPaddingCharacters) + input;
   }
+
+  /**
+   * Note: This method was taken from org.apache.hadoop.util.StringUtils.humanReadableInt(long).
+   * Reason: that method got deprecated and this method provides an easy-to-understand usage of
+   * StringUtils.TraditionalBinaryPrefix.long2String. Given an integer, return a string that is in
+   * an approximate, but human readable format.
+   * @param number the number to format
+   * @return a human readable form of the integer
+   */
+  public static String humanReadableInt(long number) {
+    return org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.long2String(number, "", 1);
+  }
 }
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
index 90905090f89..a87c58d2803 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
@@ -36,8 +36,8 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.security.visibility.CellVisibility;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
@@ -121,7 +121,7 @@ public class PutSortReducer
         }
       }
       context.setStatus("Read " + map.size() + " entries of " + map.getClass() 
+ "("
-        + StringUtils.humanReadableInt(curSize) + ")");
+        + Strings.humanReadableInt(curSize) + ")");
       int index = 0;
       for (KeyValue kv : map) {
         context.write(row, kv);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index b02517451bc..7caf98a856b 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.net.DNS;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -171,7 +170,7 @@ public abstract class TableInputFormatBase extends InputFormat<ImmutableBytesWri
       throw new IOException(INITIALIZATION_ERROR, exception);
     }
     TableSplit tSplit = (TableSplit) split;
-    LOG.info("Input split length: " + 
StringUtils.humanReadableInt(tSplit.getLength()) + " bytes.");
+    LOG.info("Input split length: " + 
Strings.humanReadableInt(tSplit.getLength()) + " bytes.");
     final TableRecordReader trr =
       this.tableRecordReader != null ? this.tableRecordReader : new TableRecordReader();
     Scan sc = new Scan(this.scan);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
index 2fba0197858..e5af25d0f70 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
@@ -35,10 +35,10 @@ import org.apache.hadoop.hbase.TagType;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.security.visibility.InvalidLabelException;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
@@ -187,7 +187,7 @@ public class TextSortReducer
         }
       }
       context.setStatus("Read " + kvs.size() + " entries of " + kvs.getClass() 
+ "("
-        + StringUtils.humanReadableInt(curSize) + ")");
+        + Strings.humanReadableInt(curSize) + ")");
       int index = 0;
       for (KeyValue kv : kvs) {
         context.write(rowKey, kv);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index af9cc4fea93..856cb5e9a67 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -58,6 +58,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.HFileArchiveUtil;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.NullWritable;
@@ -235,7 +236,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
       // Use the default block size of the outputFs if bigger
       int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);
       bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);
-      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));
+      LOG.info("Using bufferSize=" + Strings.humanReadableInt(bufferSize));
 
       for (Counter c : Counter.values()) {
         context.getCounter(c).increment(0);
@@ -342,10 +343,9 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
 
         long etime = EnvironmentEdgeManager.currentTime();
         LOG.info("copy completed for input=" + inputPath + " output=" + 
outputPath);
-        LOG
-          .info("size=" + totalBytesWritten + " (" + 
StringUtils.humanReadableInt(totalBytesWritten)
-            + ")" + " time=" + StringUtils.formatTimeDiff(etime, stime) + 
String
-              .format(" %.3fM/sec", (totalBytesWritten / ((etime - stime) / 
1000.0)) / 1048576.0));
+        LOG.info("size=" + totalBytesWritten + " (" + 
Strings.humanReadableInt(totalBytesWritten)
+          + ")" + " time=" + StringUtils.formatTimeDiff(etime, stime) + 
String.format(" %.3fM/sec",
+            (totalBytesWritten / ((etime - stime) / 1000.0)) / 1048576.0));
         context.getCounter(Counter.FILES_COPIED).increment(1);
 
         // Try to Preserve attributes
@@ -437,7 +437,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
       final Path outputPath, final FSDataOutputStream out, final long inputFileSize)
       throws IOException {
       final String statusMessage =
-        "copied %s/" + StringUtils.humanReadableInt(inputFileSize) + " (%.1f%%)";
+        "copied %s/" + Strings.humanReadableInt(inputFileSize) + " (%.1f%%)";
 
       try {
         byte[] buffer = new byte[bufferSize];
@@ -452,8 +452,8 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
 
           if (reportBytes >= REPORT_SIZE) {
             context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);
-            context.setStatus(
-              String.format(statusMessage, StringUtils.humanReadableInt(totalBytesWritten),
+            context
+              .setStatus(String.format(statusMessage, Strings.humanReadableInt(totalBytesWritten),
                 (totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath
                 + " to " + outputPath);
             reportBytes = 0;
@@ -461,10 +461,9 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
         }
 
         context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);
-        context
-          .setStatus(String.format(statusMessage, StringUtils.humanReadableInt(totalBytesWritten),
-            (totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath + " to "
-            + outputPath);
+        context.setStatus(String.format(statusMessage, Strings.humanReadableInt(totalBytesWritten),
+          (totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath + " to "
+          + outputPath);
 
         return totalBytesWritten;
       } finally {
@@ -755,7 +754,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
 
     if (LOG.isDebugEnabled()) {
       for (int i = 0; i < sizeGroups.length; ++i) {
-        LOG.debug("export split=" + i + " size=" + 
StringUtils.humanReadableInt(sizeGroups[i]));
+        LOG.debug("export split=" + i + " size=" + 
Strings.humanReadableInt(sizeGroups[i]));
       }
     }
 
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
index af39a7521cd..aa188548c87 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
@@ -36,11 +36,11 @@ import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.mapreduce.TableMapper;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.yetus.audience.InterfaceAudience;
 
@@ -125,8 +125,8 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
       .println("total time to open: " + 
fileOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
     System.out.println("total time to read: " + 
streamTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + 
StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throghput  : " + StringUtils.humanReadableInt((long) 
throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + 
Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throghput  : " + Strings.humanReadableInt((long) 
throughput) + "B/s");
   }
 
   private Scan getScan() {
@@ -189,14 +189,14 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
     System.out.println("Scan metrics:\n" + metrics.getMetricsMap());
 
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + 
StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) 
throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + 
Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throughput  : " + Strings.humanReadableInt((long) 
throughput) + "B/s");
     System.out.println("total rows  : " + numRows);
     System.out
-      .println("throughput  : " + StringUtils.humanReadableInt((long) 
throughputRows) + " rows/s");
+      .println("throughput  : " + Strings.humanReadableInt((long) 
throughputRows) + " rows/s");
     System.out.println("total cells : " + numCells);
-    System.out.println(
-      "throughput  : " + StringUtils.humanReadableInt((long) throughputCells) 
+ " cells/s");
+    System.out
+      .println("throughput  : " + Strings.humanReadableInt((long) 
throughputCells) + " cells/s");
   }
 
   public void testSnapshotScan() throws IOException {
@@ -246,14 +246,14 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
     System.out.println("Scan metrics:\n" + metrics.getMetricsMap());
 
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + 
StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) 
throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + 
Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throughput  : " + Strings.humanReadableInt((long) 
throughput) + "B/s");
     System.out.println("total rows  : " + numRows);
     System.out
-      .println("throughput  : " + StringUtils.humanReadableInt((long) 
throughputRows) + " rows/s");
+      .println("throughput  : " + Strings.humanReadableInt((long) 
throughputRows) + " rows/s");
     System.out.println("total cells : " + numCells);
-    System.out.println(
-      "throughput  : " + StringUtils.humanReadableInt((long) throughputCells) 
+ " cells/s");
+    System.out
+      .println("throughput  : " + Strings.humanReadableInt((long) 
throughputCells) + " cells/s");
 
   }
 
@@ -311,14 +311,14 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
     System.out.println("total time to scan: " + 
scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
 
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + 
StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) 
throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + 
Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throughput  : " + Strings.humanReadableInt((long) 
throughput) + "B/s");
     System.out.println("total rows  : " + numRows);
     System.out
-      .println("throughput  : " + StringUtils.humanReadableInt((long) 
throughputRows) + " rows/s");
+      .println("throughput  : " + Strings.humanReadableInt((long) 
throughputRows) + " rows/s");
     System.out.println("total cells : " + numCells);
-    System.out.println(
-      "throughput  : " + StringUtils.humanReadableInt((long) throughputCells) 
+ " cells/s");
+    System.out
+      .println("throughput  : " + Strings.humanReadableInt((long) 
throughputCells) + " cells/s");
   }
 
   public void testSnapshotScanMapReduce()
@@ -362,14 +362,14 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
     System.out.println("total time to scan: " + 
scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
 
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + 
StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) 
throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + 
Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throughput  : " + Strings.humanReadableInt((long) 
throughput) + "B/s");
     System.out.println("total rows  : " + numRows);
     System.out
-      .println("throughput  : " + StringUtils.humanReadableInt((long) 
throughputRows) + " rows/s");
+      .println("throughput  : " + Strings.humanReadableInt((long) 
throughputRows) + " rows/s");
     System.out.println("total cells : " + numCells);
-    System.out.println(
-      "throughput  : " + StringUtils.humanReadableInt((long) throughputCells) 
+ " cells/s");
+    System.out
+      .println("throughput  : " + Strings.humanReadableInt((long) 
throughputCells) + " cells/s");
   }
 
   @Override
diff --git a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
index 886be7801a2..4745ec7d052 100644
--- a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
+++ b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
@@ -62,6 +62,7 @@ org.apache.hadoop.hbase.util.CommonFSUtils;
 org.apache.hadoop.hbase.util.JvmVersion;
 org.apache.hadoop.hbase.util.PrettyPrinter;
 org.apache.hadoop.util.StringUtils;
+org.apache.hadoop.hbase.util.Strings;
 </%import>
 
 <%if format.equals("json") %>
@@ -756,7 +757,7 @@ AssignmentManager assignmentManager = master.getAssignmentManager();
         <td><% peerConfig.getReplicationEndpointImpl() %></td>
         <td><% peer.isEnabled() ? "ENABLED" : "DISABLED" %></td>
         <td><% peerConfig.isSerial() %></td>
-        <td><% peerConfig.getBandwidth() == 0? "UNLIMITED" : StringUtils.humanReadableInt(peerConfig.getBandwidth()) %></td>
+        <td><% peerConfig.getBandwidth() == 0? "UNLIMITED" : Strings.humanReadableInt(peerConfig.getBandwidth()) %></td>
         <td><% peerConfig.replicateAllUserTables() %></td>
         <td>
           <% peerConfig.getNamespaces() == null ? "" : ReplicationPeerConfigUtil.convertToString(peerConfig.getNamespaces()).replaceAll(";", "; ") %>
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
index 8897a728288..6a53d38924f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
@@ -45,8 +45,8 @@ import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.ObjectIntPair;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -1093,8 +1093,8 @@ public class HFileBlockIndex {
         LOG.trace("Wrote a " + numLevels + "-level index with root level at 
pos "
           + rootLevelIndexPos + ", " + rootChunk.getNumEntries() + " 
root-level entries, "
           + totalNumEntries + " total entries, "
-          + StringUtils.humanReadableInt(this.totalBlockOnDiskSize) + " 
on-disk size, "
-          + StringUtils.humanReadableInt(totalBlockUncompressedSize) + " total 
uncompressed size.");
+          + Strings.humanReadableInt(this.totalBlockOnDiskSize) + " on-disk 
size, "
+          + Strings.humanReadableInt(totalBlockUncompressedSize) + " total 
uncompressed size.");
       }
       return rootLevelIndexPos;
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
index 89481161f4a..44241d92895 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
@@ -21,9 +21,9 @@ import java.io.IOException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.wal.WALKey;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -63,7 +63,7 @@ public class MetricsWAL implements WALActionsListener {
     if (time > 1000) {
       source.incrementSlowAppendCount();
       LOG.warn(String.format("%s took %d ms appending an edit to wal; len~=%s",
-        Thread.currentThread().getName(), time, StringUtils.humanReadableInt(size)));
+        Thread.currentThread().getName(), time, Strings.humanReadableInt(size)));
     }
   }
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
index fb48b03d69a..151319e165a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.io.WALLink;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -525,7 +525,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
   }
 
   private String fileSizeToString(long size) {
-    return printSizeInBytes ? Long.toString(size) : StringUtils.humanReadableInt(size);
+    return printSizeInBytes ? Long.toString(size) : Strings.humanReadableInt(size);
   }
 
   @Override
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
index 6543d3b849e..0da74ebf79e 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
@@ -21,29 +21,17 @@
   import="static org.apache.commons.lang3.StringEscapeUtils.escapeXml"
   import="java.util.Collections"
   import="java.util.Comparator"
-  import="java.util.ArrayList"
   import="java.util.Date"
   import="java.util.List"
-  import="java.util.Set"
   import="org.apache.hadoop.hbase.master.HMaster"
   import="org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv"
   import="org.apache.hadoop.hbase.procedure2.LockedResource"
   import="org.apache.hadoop.hbase.procedure2.Procedure"
   import="org.apache.hadoop.hbase.procedure2.ProcedureExecutor"
-  import="org.apache.hadoop.hbase.procedure2.util.StringUtils"
-  import="org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix"
 %>
-<%@ page import="org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure" %>
-<%@ page import="org.apache.hadoop.hbase.master.assignment.TransitRegionStateProcedure" %>
-<%@ page import="org.apache.hadoop.hbase.master.assignment.OpenRegionProcedure" %>
-<%@ page import="org.apache.hadoop.hbase.master.assignment.CloseRegionProcedure" %>
-<%@ page import="org.apache.hadoop.hbase.metrics.OperationMetrics" %>
 <%@ page import="java.util.Map" %>
-<%@ page import="java.util.HashMap" %>
-<%@ page import="org.apache.hadoop.hbase.master.MetricsAssignmentManagerSource" %>
 <%@ page import="org.apache.hadoop.hbase.master.MetricsAssignmentManager" %>
 <%@ page import="org.apache.hadoop.hbase.procedure2.ProcedureMetrics" %>
-<%@ page import="org.apache.hadoop.hbase.metrics.Snapshot" %>
 <%@ page import="org.apache.hadoop.hbase.metrics.Histogram" %>
 <%@ page import="java.util.TreeMap" %>
 <%@ page import="org.apache.hadoop.hbase.metrics.impl.HistogramImpl" %>
@@ -90,7 +78,7 @@
       <h1>Procedure Time Statistics</h1>
     </div>
   </div>
-  <p>We list proceduces completed successfully of the following types only: ServerCrashProcedure, TransitRegionStateProcedure,
+  <p>We list procedures completed successfully of the following types only: ServerCrashProcedure, TransitRegionStateProcedure,
     OpenRegionProcedure, CloseRegionProcedure.</p>
   <table class="table table-striped" width="90%" >
     <tr>
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp b/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp
index a52085b529f..575994f50c1 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp
@@ -21,7 +21,6 @@
   import="java.util.concurrent.TimeUnit"
   import="java.util.ArrayList"
   import="java.util.List"
-  import="org.apache.hadoop.conf.Configuration"
   import="org.apache.hadoop.hbase.master.HMaster"
   import="org.apache.hadoop.hbase.quotas.MasterQuotaManager"
   import="org.apache.hadoop.hbase.quotas.QuotaRetriever"
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp b/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp
index 6ba6f78a273..ec7ec5dda6b 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp
@@ -169,7 +169,7 @@
                      totalRequestsPerSecond += sl.getRequestCountPerSecond();
                     lastContact = (System.currentTimeMillis() - sl.getReportTimestamp())/1000;
                    }
-                   long startcode = serverName.getStartcode();
+                   long startcode = serverName.getStartCode();
                    int infoPort = master.getRegionServerInfoPort(serverName);
                    String url = "//" + serverName.getHostname() + ":" + 
infoPort + "/rs-status";%>
                    <tr>
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
index e85cab95d7e..35b2967fc1c 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
@@ -26,7 +26,7 @@
   import="org.apache.hadoop.hbase.master.HMaster"
   import="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
   import="org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils"
-  import="org.apache.hadoop.util.StringUtils"
+  import="org.apache.hadoop.hbase.util.Strings"
   import="org.apache.hadoop.hbase.TableName"
 %>
 <%
@@ -134,14 +134,14 @@
   <div class="row">
     <div class="span12">
     <%= stats.getStoreFilesCount() %> HFiles (<%= stats.getArchivedStoreFilesCount() %> in archive),
-    total size <%= StringUtils.humanReadableInt(stats.getStoreFilesSize()) %>
+    total size <%= Strings.humanReadableInt(stats.getStoreFilesSize()) %>
     (<%= stats.getSharedStoreFilePercentage() %>&#37;
-    <%= StringUtils.humanReadableInt(stats.getSharedStoreFilesSize()) %> shared with the source
+    <%= Strings.humanReadableInt(stats.getSharedStoreFilesSize()) %> shared with the source
     table)
     </div>
     <div class="span12">
     <%= stats.getLogsCount() %> Logs, total size
-    <%= StringUtils.humanReadableInt(stats.getLogsSize()) %>
+    <%= Strings.humanReadableInt(stats.getLogsSize()) %>
     </div>
   </div>
   <% if (stats.isSnapshotCorrupted()) { %>
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
index 6202d7409b5..bc913188282 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
@@ -28,7 +28,7 @@
   import="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
   import="org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils"
   import="org.apache.hadoop.hbase.TableName"
-  import="org.apache.hadoop.util.StringUtils"
+  import="org.apache.hadoop.hbase.util.Strings"
   
import="org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription"
 %>
 <%@ page import="org.apache.hadoop.hbase.util.PrettyPrinter" %>
@@ -99,18 +99,18 @@
       <td>
         <%= SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDesc.getTtl(), snapshotDesc.getCreationTime(), System.currentTimeMillis()) ? "Yes" : "No" %>
       </td>
-      <td><%= StringUtils.humanReadableInt(stats.getSharedStoreFilesSize()) %></td>
-      <td><%= StringUtils.humanReadableInt(stats.getMobStoreFilesSize())  %></td>
-      <td><%= StringUtils.humanReadableInt(stats.getArchivedStoreFileSize()) %>
-        (<%= StringUtils.humanReadableInt(stats.getNonSharedArchivedStoreFilesSize()) %>)</td>
+      <td><%= Strings.humanReadableInt(stats.getSharedStoreFilesSize()) %></td>
+      <td><%= Strings.humanReadableInt(stats.getMobStoreFilesSize())  %></td>
+      <td><%= Strings.humanReadableInt(stats.getArchivedStoreFileSize()) %>
+        (<%= Strings.humanReadableInt(stats.getNonSharedArchivedStoreFilesSize()) %>)</td>
     </tr>
     <% } %>
     <p><%= snapshots.size() %> snapshot(s) in set.</p>
-    <p>Total Storefile Size: <%= StringUtils.humanReadableInt(totalSize) %></p>
-    <p>Total Shared Storefile Size: <%= StringUtils.humanReadableInt(totalSharedSize.get()) %>,
-       Total Mob Storefile Size: <%= StringUtils.humanReadableInt(totalMobSize.get()) %>,
-       Total Archived Storefile Size: <%= StringUtils.humanReadableInt(totalArchivedSize.get()) %>
-       (<%= StringUtils.humanReadableInt(totalUnsharedArchivedSize) %>)</p>
+    <p>Total Storefile Size: <%= Strings.humanReadableInt(totalSize) %></p>
+    <p>Total Shared Storefile Size: <%= Strings.humanReadableInt(totalSharedSize.get()) %>,
+       Total Mob Storefile Size: <%= Strings.humanReadableInt(totalMobSize.get()) %>,
+       Total Archived Storefile Size: <%= Strings.humanReadableInt(totalArchivedSize.get()) %>
+       (<%= Strings.humanReadableInt(totalUnsharedArchivedSize) %>)</p>
     <p>Shared Storefile Size is the Storefile size shared between snapshots and active tables.
       Mob Storefile Size is the Mob Storefile size shared between snapshots and active tables.
        Archived Storefile Size is the Storefile size in Archive.
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
index 6ae98b6985e..d40bc64edd8 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
@@ -76,6 +76,8 @@
 <%@ page import="org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota" %>
 <%@ page import="java.net.URLEncoder" %>
 <%@ page import="java.util.stream.Collectors" %>
+<%@ page import="java.nio.charset.StandardCharsets" %>
+<%@ page import="java.io.UnsupportedEncodingException" %>
 <%!
   /**
    * @return An empty region load stamped with the passed in <code>regionInfo</code>
@@ -115,7 +117,7 @@
    * @return an <td> tag contents server name links to server rs-status page.
    */
   private static String buildRegionDeployedServerTag(RegionInfo regionInfo, HMaster master,
-    Map<RegionInfo, ServerName> regionsToServer) {
+                                                     Map<RegionInfo, ServerName> regionsToServer) throws UnsupportedEncodingException {
     ServerName serverName = regionsToServer.get(regionInfo);
 
     if (serverName == null) {
@@ -123,7 +125,7 @@
     }
 
     String hostName = serverName.getHostname();
-    String hostNameEncoded = URLEncoder.encode(hostName);
+    String hostNameEncoded = URLEncoder.encode(hostName, StandardCharsets.UTF_8.toString());
     // This port might be wrong if RS actually ended up using something else.
     int serverInfoPort = master.getRegionServerInfoPort(serverName);
     String urlRegionServer = "//" + hostNameEncoded + ":" + serverInfoPort + "/rs-status";
@@ -135,9 +137,9 @@
   /**
    * @return an <p> tag guide user to see all region messages.
    */
-  private static String moreRegionsToRender(int numRegionsRendered, int numRegions, String fqtn) {
+  private static String moreRegionsToRender(int numRegionsRendered, int numRegions, String fqtn) throws UnsupportedEncodingException {
     if (numRegions > numRegionsRendered) {
-      String allRegionsUrl = "?name=" + URLEncoder.encode(fqtn) + "&numRegions=all";
+      String allRegionsUrl = "?name=" + URLEncoder.encode(fqtn, StandardCharsets.UTF_8.toString()) + "&numRegions=all";
 
       return "This table has <b>" + numRegions
         + "</b> regions in total, in order to improve the page load time, only 
<b>"
@@ -348,7 +350,7 @@
               if (metaLocation != null) {
                ServerMetrics sl = master.getServerManager().getLoad(metaLocation);
                // The host name portion should be safe, but I don't know how we handle IDNs so err on the side of failing safely.
-                hostAndPort = URLEncoder.encode(metaLocation.getHostname()) + ":" + master.getRegionServerInfoPort(metaLocation);
+                hostAndPort = URLEncoder.encode(metaLocation.getHostname(), StandardCharsets.UTF_8.toString()) + ":" + master.getRegionServerInfoPort(metaLocation);
                 if (sl != null) {
                   Map<byte[], RegionMetrics> map = sl.getRegionMetrics();
                   if (map.containsKey(meta.getRegionName())) {
@@ -417,7 +419,7 @@
                float localityForSsd = 0.0f;
                if (metaLocation != null) {
                 ServerMetrics sl = master.getServerManager().getLoad(metaLocation);
-                 hostAndPort = URLEncoder.encode(metaLocation.getHostname()) + ":" + master.getRegionServerInfoPort(metaLocation);
+                 hostAndPort = URLEncoder.encode(metaLocation.getHostname(), StandardCharsets.UTF_8.toString()) + ":" + master.getRegionServerInfoPort(metaLocation);
                  if (sl != null) {
                    Map<byte[], RegionMetrics> map = sl.getRegionMetrics();
                    if (map.containsKey(meta.getRegionName())) {
@@ -469,7 +471,7 @@
               String compactionProgress = "";
               if (metaLocation != null) {
                ServerMetrics sl = master.getServerManager().getLoad(metaLocation);
-                hostAndPort = URLEncoder.encode(metaLocation.getHostname()) + ":" + master.getRegionServerInfoPort(metaLocation);
+                hostAndPort = URLEncoder.encode(metaLocation.getHostname(), StandardCharsets.UTF_8.toString()) + ":" + master.getRegionServerInfoPort(metaLocation);
                 if (sl != null) {
                   Map<byte[], RegionMetrics> map = sl.getRegionMetrics();
                   if (map.containsKey(meta.getRegionName())) {
@@ -1036,11 +1038,9 @@
           numRegionsRendered = 0;
           for (Map.Entry<RegionInfo, RegionMetrics> hriEntry : entryList) {
             RegionInfo regionInfo = hriEntry.getKey();
-            ServerName addr = regionsToServer.get(regionInfo);
             RegionMetrics load = hriEntry.getValue();
             float locality = 0.0f;
             float localityForSsd = 0.0f;
-            String state = "N/A";
             if (load != null) {
               locality = load.getDataLocality();
               localityForSsd = load.getDataLocalityForSsd();
@@ -1124,11 +1124,11 @@
   <%
     for (Map.Entry<ServerName, Integer> rdEntry : regDistribution.entrySet()) {
       ServerName addr = rdEntry.getKey();
-      String url = "//" + URLEncoder.encode(addr.getHostname()) + ":"
+      String url = "//" + URLEncoder.encode(addr.getHostname(), 
StandardCharsets.UTF_8.toString()) + ":"
         + master.getRegionServerInfoPort(addr) + "/rs-status";
   %>
       <tr>
-        <td><a href="<%= url %>"><%= 
StringEscapeUtils.escapeHtml4(addr.getHostname().toString())
+        <td><a href="<%= url %>"><%= 
StringEscapeUtils.escapeHtml4(addr.getHostname())
           + ":" + master.getRegionServerInfoPort(addr) %></a></td>
         <td><%= rdEntry.getValue()%></td>
         <td><%= primaryRegDistribution.get(addr) == null ? 0 : 
primaryRegDistribution.get(addr)%></td>
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp b/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp
index 2ad44a3d68b..998e61a539b 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp
@@ -19,10 +19,8 @@
 --%>
 <%@ page contentType="text/html;charset=UTF-8"
          import="static org.apache.commons.lang3.StringEscapeUtils.escapeXml"
-         import="java.io.IOException"
          import="java.util.ArrayList"
          import="java.util.List"
-         import="java.util.Map"
 %>
 <%@ page import="org.apache.hadoop.hbase.client.TableDescriptor" %>
 <%@ page import="org.apache.hadoop.hbase.master.HMaster" %>
diff --git a/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp b/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
index 162405a022d..e2fdab66e6a 100644
--- a/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
@@ -29,6 +29,7 @@
   import="org.apache.hadoop.hbase.regionserver.HRegion"
   import="org.apache.hadoop.hbase.regionserver.HStore"
 %>
+<%@ page import="java.nio.charset.StandardCharsets" %>
 <%
   String regionName = request.getParameter("name");
   HRegionServer rs = (HRegionServer) getServletContext().getAttribute(HRegionServer.REGIONSERVER);
@@ -87,7 +88,7 @@
             count++; %>
          <tr>
            <td><a href="storeFile.jsp?name=<%= sf.getEncodedPath() %>"><%= 
sf.getPath() %></a></td>
-           <td><%= (int) (fs.getLength(sf.getPath()) / 1024 / 1024) %></td>
+           <td><%= (int) (fs.getFileStatus(sf.getPath()).getLen() / 1024 / 
1024) %></td>
            <td><%= new Date(sf.getModificationTimestamp()) %></td>
          </tr>
          <% } %>
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
index bdc81974f90..e8ea771ae72 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
@@ -72,13 +72,13 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.wal.WALFactory;
 import org.apache.hadoop.hbase.wal.WALKeyImpl;
 import org.apache.hadoop.hbase.wal.WALSplitUtil.MutationReplay;
-import org.apache.hadoop.util.StringUtils;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -386,7 +386,7 @@ public class TestHRegionReplayEvents {
           // assert that the store memstore is smaller now
           long newStoreMemstoreSize = store.getMemStoreSize().getHeapSize();
           LOG.info("Memstore size reduced by:"
-            + StringUtils.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize));
+            + Strings.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize));
           assertTrue(storeMemstoreSize > newStoreMemstoreSize);
 
         } else if (flushDesc.getAction() == FlushAction.COMMIT_FLUSH) {
@@ -487,7 +487,7 @@ public class TestHRegionReplayEvents {
           // assert that the store memstore is smaller now
           long newStoreMemstoreSize = store.getMemStoreSize().getHeapSize();
           LOG.info("Memstore size reduced by:"
-            + StringUtils.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize));
+            + Strings.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize));
           assertTrue(storeMemstoreSize > newStoreMemstoreSize);
           verifyData(secondaryRegion, 0, lastReplayed + 1, cq, families);
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
index 1638331c288..9bc2f6a5773 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
@@ -27,7 +27,7 @@ import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFileReader;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.hbase.util.Strings;
 
 import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
 
@@ -71,7 +71,7 @@ class MockStoreFileGenerator {
     // this when selection takes this into account
     when(mockSf.getReader()).thenReturn(reader);
     String toString = MoreObjects.toStringHelper("MockStoreFile").add("isReference", false)
-      .add("fileSize", StringUtils.humanReadableInt(sizeInBytes)).add("seqId", seqId)
+      .add("fileSize", Strings.humanReadableInt(sizeInBytes)).add("seqId", seqId)
       .add("path", stringPath).toString();
     when(mockSf.toString()).thenReturn(toString);
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
index 8d5efd2fb95..a42506cfd7c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
@@ -39,7 +39,6 @@ import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
-import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -204,7 +203,7 @@ public abstract class MultiThreadedAction {
           double averageKeysPerSecond = (time > 0) ? (numKeys * 1000 / time) : 0;
 
           LOG.info(threadsLeft + "Keys=" + numKeys + ", cols="
-            + StringUtils.humanReadableInt(numCols.get()) + ", time=" + formatTime(time)
+            + Strings.humanReadableInt(numCols.get()) + ", time=" + formatTime(time)
             + ((numKeys > 0 && time > 0)
               ? (" Overall: [" + "keys/s= " + numKeys * 1000 / time + ", latency="
                 + String.format("%.2f", (double) totalOpTime / (double) numKeys) + " ms]")

