This is an automated email from the ASF dual-hosted git repository.

dineshc pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 97c5a6efbaf0 HADOOP-19041. Use StandardCharsets in more places (#6449)
97c5a6efbaf0 is described below

commit 97c5a6efbaf01dcab55fdff4d24fe59b6cda1c99
Author: PJ Fanning <pjfann...@users.noreply.github.com>
AuthorDate: Fri Mar 29 04:17:18 2024 +0100

    HADOOP-19041. Use StandardCharsets in more places (#6449)
---
 .../security/authentication/util/KerberosUtil.java | 11 +++-------
 .../java/org/apache/hadoop/conf/Configuration.java |  2 +-
 .../main/java/org/apache/hadoop/fs/XAttrCodec.java |  7 +++---
 .../org/apache/hadoop/conf/TestConfiguration.java  |  2 +-
 .../org/apache/hadoop/fs/TestLocalFileSystem.java  |  3 ++-
 .../apache/hadoop/fs/http/client/HttpFSUtils.java  |  2 +-
 .../blockaliasmap/impl/TextFileRegionAliasMap.java |  5 +++--
 .../hadoop/hdfs/server/datanode/BlockReceiver.java |  3 ++-
 .../datanode/fsdataset/impl/BlockPoolSlice.java    |  3 ++-
 .../datanode/fsdataset/impl/FsVolumeImpl.java      |  3 ++-
 .../org/apache/hadoop/hdfs/server/mover/Mover.java |  3 ++-
 .../java/org/apache/hadoop/hdfs/tools/DFSck.java   |  5 +++--
 .../OfflineImageReconstructor.java                 | 14 ++++++------
 .../java/org/apache/hadoop/hdfs/TestDFSShell.java  |  3 ++-
 .../hadoop/hdfs/TestDistributedFileSystem.java     |  3 ++-
 .../org/apache/hadoop/hdfs/UpgradeUtilities.java   |  8 +++----
 .../datanode/fsdataset/impl/TestProvidedImpl.java  |  3 ++-
 .../server/namenode/TestFSPermissionChecker.java   |  7 +++---
 .../hdfs/web/TestWebHdfsFileSystemContract.java    |  4 ++--
 .../hadoop/mapreduce/v2/app/rm/RMCommunicator.java |  2 +-
 .../mapreduce/v2/app/webapp/dao/JobInfo.java       |  2 +-
 .../TestJobResourceUploaderWithSharedCache.java    | 25 +++++++++++-----------
 .../mapred/TestConcatenatedCompressedInput.java    |  2 +-
 .../apache/hadoop/mapred/TestFieldSelection.java   |  3 ++-
 .../hadoop/mapred/TestKeyValueTextInputFormat.java |  5 ++---
 .../apache/hadoop/mapred/TestTextInputFormat.java  |  9 +++-----
 .../mapred/lib/aggregate/TestAggregates.java       |  7 +++---
 .../lib/aggregate/TestMapReduceAggregates.java     |  7 +++---
 .../lib/input/TestMRKeyValueTextInputFormat.java   |  5 ++---
 .../hadoop/maven/plugin/cmakebuilder/TestMojo.java |  3 ++-
 .../org/apache/hadoop/tools/HadoopArchives.java    |  2 +-
 .../fs/azurebfs/services/SharedKeyCredentials.java |  8 ++-----
 .../ITestAzureBlobFileSystemDelegationSAS.java     |  2 +-
 .../namenode/ITestProvidedImplementation.java      |  3 ++-
 .../hadoop/mapred/gridmix/ExecutionSummarizer.java |  2 +-
 .../org/apache/hadoop/streaming/StreamUtil.java    |  2 +-
 .../hadoop/streaming/TestStreamDataProtocol.java   |  7 ++----
 .../hadoop/streaming/TestStreamReduceNone.java     |  7 ++----
 .../streaming/TestStreamXmlRecordReader.java       |  7 +++---
 .../org/apache/hadoop/streaming/TestStreaming.java |  3 ++-
 .../hadoop/streaming/TestStreamingKeyValue.java    |  3 ++-
 .../hadoop/streaming/TestStreamingSeparator.java   |  7 ++----
 .../hadoop/streaming/TestTypedBytesStreaming.java  |  3 ++-
 .../mapreduce/TestStreamXmlRecordReader.java       |  6 +++---
 .../apache/hadoop/yarn/client/cli/TestTopCLI.java  |  3 ++-
 .../apache/hadoop/yarn/client/cli/TestYarnCLI.java |  3 ++-
 .../logaggregation/TestAggregatedLogFormat.java    |  5 +++--
 .../TestApplicationHistoryServer.java              |  3 ++-
 .../amrmproxy/FederationInterceptor.java           |  7 +++---
 .../resources/CGroupsBlkioResourceHandlerImpl.java |  5 +++--
 .../resources/CGroupsCpuResourceHandlerImpl.java   |  3 ++-
 .../linux/resources/CGroupsHandlerImpl.java        |  9 ++++----
 .../linux/resources/TrafficController.java         |  7 +++---
 .../runtime/DefaultLinuxContainerRuntime.java      |  7 +++---
 .../linux/runtime/docker/DockerClient.java         |  9 ++++----
 .../gpu/NvidiaDockerV1CommandPlugin.java           |  3 ++-
 .../util/CgroupsLCEResourcesHandler.java           |  7 +++---
 .../nodemanager/util/ProcessIdFileReader.java      |  5 +++--
 .../converter/FSConfigToCSConfigConverter.java     |  2 +-
 .../webapp/TestRMWebServicesAppsModification.java  |  4 ++--
 ...RMWebServicesDelegationTokenAuthentication.java |  9 ++++----
 ...TestRMWebServicesHttpStaticUserPermissions.java |  3 ++-
 62 files changed, 166 insertions(+), 151 deletions(-)

diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
index 5125be078d67..f1517d65bd87 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
@@ -21,12 +21,11 @@ import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import java.io.File;
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.lang.reflect.InvocationTargetException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.nio.ByteBuffer;
-import java.nio.charset.IllegalCharsetNameException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -426,12 +425,8 @@ public class KerberosUtil {
     }
 
     String getAsString() {
-      try {
-        return new String(bb.array(), bb.arrayOffset() + bb.position(),
-            bb.remaining(), "UTF-8");
-      } catch (UnsupportedEncodingException e) {
-        throw new IllegalCharsetNameException("UTF-8"); // won't happen.
-      }
+      return new String(bb.array(), bb.arrayOffset() + bb.position(),
+          bb.remaining(), StandardCharsets.UTF_8);
     }
 
     @Override
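
The hunk above is the template for the whole patch: the Charset-typed String
constructors and String.getBytes(Charset) declare no checked exception, so the
unreachable UnsupportedEncodingException catch can simply be deleted once the
constants from java.nio.charset.StandardCharsets (Java 7+) are used. A minimal,
self-contained sketch of the resulting decode path (class and variable names
are illustrative, not from the patch):

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    public final class CharsetDemo {
      public static void main(String[] args) {
        byte[] raw = "principal@EXAMPLE.COM".getBytes(StandardCharsets.UTF_8);
        ByteBuffer bb = ByteBuffer.wrap(raw);
        // The Charset-typed constructor declares no checked exception, so no
        // try/catch for UnsupportedEncodingException is needed, and the
        // charset name can no longer be misspelled at runtime.
        String decoded = new String(bb.array(), bb.arrayOffset() + bb.position(),
            bb.remaining(), StandardCharsets.UTF_8);
        System.out.println(decoded);
      }
    }
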
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index ea3d6dc74e4a..8fc3a696c4aa 100755
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -3565,7 +3565,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @throws IOException raised on errors performing I/O.
    */
   public void writeXml(OutputStream out) throws IOException {
-    writeXml(new OutputStreamWriter(out, "UTF-8"));
+    writeXml(new OutputStreamWriter(out, StandardCharsets.UTF_8));
   }
 
   public void writeXml(Writer out) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
index df878d998706..45636c566608 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.fs;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.commons.codec.DecoderException;
 import org.apache.commons.codec.binary.Base64;
@@ -76,7 +77,7 @@ public enum XAttrCodec {
         String en = value.substring(0, 2);
         if (value.startsWith("\"") && value.endsWith("\"")) {
           value = value.substring(1, value.length()-1);
-          result = value.getBytes("utf-8");
+          result = value.getBytes(StandardCharsets.UTF_8);
         } else if (en.equalsIgnoreCase(HEX_PREFIX)) {
           value = value.substring(2, value.length());
           try {
@@ -90,7 +91,7 @@ public enum XAttrCodec {
         }
       }
       if (result == null) {
-        result = value.getBytes("utf-8");
+        result = value.getBytes(StandardCharsets.UTF_8);
       }
     }
     return result;
@@ -114,7 +115,7 @@ public enum XAttrCodec {
     } else if (encoding == BASE64) {
       return BASE64_PREFIX + base64.encodeToString(value);
     } else {
-      return "\"" + new String(value, "utf-8") + "\"";
+      return "\"" + new String(value, StandardCharsets.UTF_8) + "\"";
     }
   }
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
index b3487ef309fc..e70cc6d8b18e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
@@ -387,7 +387,7 @@ public class TestConfiguration {
       String name = "multi_byte_\u611b_name";
       String value = "multi_byte_\u0641_value";
       out = new BufferedWriter(new OutputStreamWriter(
-        new FileOutputStream(CONFIG_MULTI_BYTE), "UTF-8"));
+        new FileOutputStream(CONFIG_MULTI_BYTE), StandardCharsets.UTF_8));
       startConfig();
       declareProperty(name, value, value);
       endConfig();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
index 38e16221a451..79049d383713 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
@@ -32,6 +32,7 @@ import static org.apache.hadoop.fs.FileSystemTestHelper.*;
 
 import java.io.*;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.EnumSet;
@@ -673,7 +674,7 @@ public class TestLocalFileSystem {
           fileSys.createFile(path).recursive();
       FSDataOutputStream out = builder.build();
       String content = "Create with a generic type of createFile!";
-      byte[] contentOrigin = content.getBytes("UTF8");
+      byte[] contentOrigin = content.getBytes(StandardCharsets.UTF_8);
       out.write(contentOrigin);
       out.close();
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java
index bd9baaa93f41..f1ddddd4cbbd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java
@@ -106,7 +106,7 @@ public class HttpFSUtils {
     if (multiValuedParams != null) {
       for (Map.Entry<String, List<String>> multiValuedEntry : 
         multiValuedParams.entrySet()) {
-        String name = URLEncoder.encode(multiValuedEntry.getKey(), "UTF8");
+        String name = URLEncoder.encode(multiValuedEntry.getKey(), "UTF-8");
         List<String> values = multiValuedEntry.getValue();
         for (String value : values) {
           sb.append(separator).append(name).append("=").
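
Note this is the one place the patch keeps a string charset name, merely
correcting the non-canonical alias "UTF8" (which the JDK accepts) to the
canonical "UTF-8": URLEncoder.encode(String, Charset) only exists since
Java 10, so on an older language baseline the String-named overload is the
only option. A hedged side-by-side sketch (the second call compiles on
Java 10 and later only):

    import java.io.UnsupportedEncodingException;
    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;

    public final class EncodeDemo {
      public static void main(String[] args) throws UnsupportedEncodingException {
        // Pre-Java 10: only the String-named overload exists, and it declares
        // a checked UnsupportedEncodingException.
        String legacy = URLEncoder.encode("a b&c", "UTF-8");
        // Java 10+: the Charset overload removes the checked exception.
        String modern = URLEncoder.encode("a b&c", StandardCharsets.UTF_8);
        System.out.println(legacy + " " + modern);
      }
    }
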
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java
index e94e48ed0f7b..0df38d520438 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java
@@ -26,6 +26,7 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Base64;
 import java.util.Iterator;
@@ -148,7 +149,7 @@ public class TextFileRegionAliasMap
     }
     OutputStream tmp = fs.create(file);
     java.io.Writer out = new BufferedWriter(new OutputStreamWriter(
-          (null == codec) ? tmp : codec.createOutputStream(tmp), "UTF-8"));
+          (null == codec) ? tmp : codec.createOutputStream(tmp), StandardCharsets.UTF_8));
     return new TextWriter(out, delim);
   }
 
@@ -379,7 +380,7 @@ public class TextFileRegionAliasMap
       FRIterator i = new FRIterator();
       try {
         BufferedReader r =
-            new BufferedReader(new InputStreamReader(createStream(), "UTF-8"));
+            new BufferedReader(new InputStreamReader(createStream(), StandardCharsets.UTF_8));
         iterators.put(i, r);
         i.pending = nextInternal(i);
       } catch (IOException e) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
index 1d34c773e625..263241a4b9ff 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
@@ -29,6 +29,7 @@ import java.io.InterruptedIOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayDeque;
 import java.util.Arrays;
 import java.util.Queue;
@@ -1063,7 +1064,7 @@ class BlockReceiver implements Closeable {
           // send a special ack upstream.
           if (datanode.isRestarting() && isClient && !isTransfer) {
             try (Writer out = new OutputStreamWriter(
-                replicaInfo.createRestartMetaStream(), "UTF-8")) {
+                replicaInfo.createRestartMetaStream(), StandardCharsets.UTF_8)) {
               // write out the current time.
               out.write(Long.toString(Time.now() + restartBudget));
               out.flush();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
index 4611df765f99..8c643e9e16ac 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
@@ -28,6 +28,7 @@ import java.io.InputStream;
 import java.io.OutputStreamWriter;
 import java.io.RandomAccessFile;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -399,7 +400,7 @@ public class BlockPoolSlice {
     try {
       long used = getDfsUsed();
       try (Writer out = new OutputStreamWriter(
-          Files.newOutputStream(outFile.toPath()), "UTF-8")) {
+          Files.newOutputStream(outFile.toPath()), StandardCharsets.UTF_8)) {
         // mtime is written last, so that truncated writes won't be valid.
         out.write(Long.toString(used) + " " + Long.toString(timer.now()));
         // This is only called as part of the volume shutdown.
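
The writer-side conversions all follow this shape: an OutputStreamWriter built
over a stream with StandardCharsets.UTF_8, inside try-with-resources. A
runnable sketch under the same pattern (the file name and payload are
illustrative):

    import java.io.IOException;
    import java.io.OutputStreamWriter;
    import java.io.Writer;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public final class WriteDemo {
      public static void main(String[] args) throws IOException {
        Path out = Paths.get("dfsUsed.tmp");
        // Explicit charset: the file's bytes no longer depend on the platform
        // default encoding.
        try (Writer w = new OutputStreamWriter(
            Files.newOutputStream(out), StandardCharsets.UTF_8)) {
          w.write(12345L + " " + System.currentTimeMillis());
        }
        // Files.newBufferedWriter(out) is an equivalent shortcut; it defaults
        // to UTF-8 when no Charset argument is given.
      }
    }
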
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
index 2935e6ae3221..47f0a3556aad 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
@@ -27,6 +27,7 @@ import java.io.OutputStreamWriter;
 import java.io.RandomAccessFile;
 import java.net.URI;
 import java.nio.channels.ClosedChannelException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Paths;
 import java.nio.file.StandardCopyOption;
 import java.util.Collection;
@@ -929,7 +930,7 @@ public class FsVolumeImpl implements FsVolumeSpi {
       boolean success = false;
       try (BufferedWriter writer = new BufferedWriter(
           new OutputStreamWriter(fileIoProvider.getFileOutputStream(
-              FsVolumeImpl.this, getTempSaveFile()), "UTF-8"))) {
+              FsVolumeImpl.this, getTempSaveFile()), StandardCharsets.UTF_8))) {
         WRITER.writeValue(writer, state);
         success = true;
       } finally {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java
index d8fb81b2adb7..63fe238cd5e0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java
@@ -66,6 +66,7 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.net.InetSocketAddress;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.text.DateFormat;
 import java.util.*;
 import java.util.concurrent.TimeUnit;
@@ -740,7 +741,7 @@ public class Mover {
     private static String[] readPathFile(String file) throws IOException {
       List<String> list = Lists.newArrayList();
       BufferedReader reader = new BufferedReader(
-          new InputStreamReader(new FileInputStream(file), "UTF-8"));
+          new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8));
       try {
         String line;
         while ((line = reader.readLine()) != null) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java
index 9f0288ebf3f3..a0da4eaf8053 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java
@@ -26,6 +26,7 @@ import java.net.URI;
 import java.net.URL;
 import java.net.URLConnection;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.concurrent.TimeUnit;
 
@@ -207,7 +208,7 @@ public class DFSck extends Configured implements Tool {
       }
       InputStream stream = connection.getInputStream();
       BufferedReader input = new BufferedReader(new InputStreamReader(
-          stream, "UTF-8"));
+          stream, StandardCharsets.UTF_8));
       try {
         String line = null;
         while ((line = input.readLine()) != null) {
@@ -376,7 +377,7 @@ public class DFSck extends Configured implements Tool {
     }
     InputStream stream = connection.getInputStream();
     BufferedReader input = new BufferedReader(new InputStreamReader(
-                                              stream, "UTF-8"));
+                                              stream, StandardCharsets.UTF_8));
     String line = null;
     String lastLine = NamenodeFsck.CORRUPT_STATUS;
     int errCode = -1;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java
index 0c075ff6dac7..b66fad834c47 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java
@@ -622,7 +622,7 @@ class OfflineImageReconstructor {
     inodeBld.setId(id);
     String name = node.removeChildStr(SECTION_NAME);
     if (name != null) {
-      inodeBld.setName(ByteString.copyFrom(name, "UTF8"));
+      inodeBld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
     }
     switch (type) {
     case "FILE":
@@ -838,7 +838,7 @@ class OfflineImageReconstructor {
     }
     String target = node.removeChildStr(INODE_SECTION_TARGET);
     if (target != null) {
-      bld.setTarget(ByteString.copyFrom(target, "UTF8"));
+      bld.setTarget(ByteString.copyFrom(target, StandardCharsets.UTF_8));
     }
     Long lval = node.removeChildLong(INODE_SECTION_MTIME);
     if (lval != null) {
@@ -900,7 +900,7 @@ class OfflineImageReconstructor {
         }
         val = new HexBinaryAdapter().unmarshal(valHex);
       } else {
-        val = valStr.getBytes("UTF8");
+        val = valStr.getBytes(StandardCharsets.UTF_8);
       }
       b.setValue(ByteString.copyFrom(val));
 
@@ -1232,7 +1232,7 @@ class OfflineImageReconstructor {
         }
         String name = inodeRef.removeChildStr("name");
         if (name != null) {
-          bld.setName(ByteString.copyFrom(name, "UTF8"));
+          bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
         }
         Integer dstSnapshotId = inodeRef.removeChildInt(
             INODE_REFERENCE_SECTION_DST_SNAPSHOT_ID);
@@ -1468,7 +1468,7 @@ class OfflineImageReconstructor {
         bld.setChildrenSize(childrenSize);
         String name = dirDiff.removeChildStr(SECTION_NAME);
         if (name != null) {
-          bld.setName(ByteString.copyFrom(name, "UTF8"));
+          bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
         }
         Node snapshotCopy = dirDiff.removeChild(
             SNAPSHOT_DIFF_SECTION_SNAPSHOT_COPY);
@@ -1514,7 +1514,7 @@ class OfflineImageReconstructor {
           }
           created.verifyNoRemainingKeys("created");
           FsImageProto.SnapshotDiffSection.CreatedListEntry.newBuilder().
-              setName(ByteString.copyFrom(cleName, "UTF8")).
+              setName(ByteString.copyFrom(cleName, StandardCharsets.UTF_8)).
               build().writeDelimitedTo(out);
           actualCreatedListSize++;
         }
@@ -1571,7 +1571,7 @@ class OfflineImageReconstructor {
         }
         String name = fileDiff.removeChildStr(SECTION_NAME);
         if (name != null) {
-          bld.setName(ByteString.copyFrom(name, "UTF8"));
+          bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
         }
         Node snapshotCopy = fileDiff.removeChild(
             SNAPSHOT_DIFF_SECTION_SNAPSHOT_COPY);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
index e54b7332b1ef..e43e9b14b911 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.security.Permission;
 import java.security.PrivilegedExceptionAction;
 import java.text.SimpleDateFormat;
@@ -1917,7 +1918,7 @@ public class TestDFSShell {
     char c = content.charAt(0);
     sb.setCharAt(0, ++c);
     for(MaterializedReplica replica : replicas) {
-      replica.corruptData(sb.toString().getBytes("UTF8"));
+      replica.corruptData(sb.toString().getBytes(StandardCharsets.UTF_8));
     }
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
index 6330c1bddb4a..669224818f07 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
@@ -45,6 +45,7 @@ import java.net.InetSocketAddress;
 import java.net.ServerSocket;
 import java.net.SocketTimeoutException;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.security.NoSuchAlgorithmException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
@@ -1885,7 +1886,7 @@ public class TestDistributedFileSystem {
           .replication((short) 1)
           .blockSize(4096)
           .build()) {
-        byte[] contentOrigin = content.getBytes("UTF8");
+        byte[] contentOrigin = content.getBytes(StandardCharsets.UTF_8);
         out1.write(contentOrigin);
       }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java
index 7ebf55f571cd..fbd024817523 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java
@@ -194,10 +194,10 @@ public class UpgradeUtilities {
    */
   public static Configuration initializeStorageStateConf(int numDirs,
                                                          Configuration conf) {
-    StringBuffer nameNodeDirs =
-      new StringBuffer(new File(TEST_ROOT_DIR, "name1").toString());
-    StringBuffer dataNodeDirs =
-      new StringBuffer(new File(TEST_ROOT_DIR, "data1").toString());
+    StringBuilder nameNodeDirs =
+        new StringBuilder(new File(TEST_ROOT_DIR, "name1").toString());
+    StringBuilder dataNodeDirs =
+        new StringBuilder(new File(TEST_ROOT_DIR, "data1").toString());
     for (int i = 2; i <= numDirs; i++) {
       nameNodeDirs.append("," + new File(TEST_ROOT_DIR, "name"+i));
       dataNodeDirs.append("," + new File(TEST_ROOT_DIR, "data"+i));
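
Alongside the charset cleanup, the patch swaps StringBuffer for StringBuilder
wherever the buffer never escapes a single thread: the append() API is
identical, but StringBuilder drops StringBuffer's per-call synchronization.
A small sketch of the pattern (directory names are illustrative):

    public final class BuilderDemo {
      public static void main(String[] args) {
        // A local StringBuilder is never shared between threads, so the
        // synchronized StringBuffer buys nothing here.
        StringBuilder dirs = new StringBuilder("name1");
        for (int i = 2; i <= 4; i++) {
          dirs.append(',').append("name").append(i);
        }
        System.out.println(dirs);  // name1,name2,name3,name4
      }
    }
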
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java
index f8d66c2f2ccc..dbd77c7f13a5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java
@@ -33,6 +33,7 @@ import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -147,7 +148,7 @@ public class TestProvidedImpl {
                 newFile.getAbsolutePath());
             newFile.createNewFile();
             Writer writer = new OutputStreamWriter(
-                new FileOutputStream(newFile.getAbsolutePath()), "utf-8");
+                new FileOutputStream(newFile.getAbsolutePath()), StandardCharsets.UTF_8);
             for(int i=0; i< BLK_LEN/(Integer.SIZE/8); i++) {
               writer.write(currentCount);
             }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSPermissionChecker.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSPermissionChecker.java
index f13ed7efdcba..95b63960e348 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSPermissionChecker.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSPermissionChecker.java
@@ -39,6 +39,7 @@ import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.function.LongFunction;
 
@@ -433,7 +434,7 @@ public class TestFSPermissionChecker {
     PermissionStatus permStatus = PermissionStatus.createImmutable(owner, group,
       FsPermission.createImmutable(perm));
     INodeDirectory inodeDirectory = new INodeDirectory(
-      HdfsConstants.GRANDFATHER_INODE_ID, name.getBytes("UTF-8"), permStatus, 0L);
+        HdfsConstants.GRANDFATHER_INODE_ID, name.getBytes(StandardCharsets.UTF_8), permStatus, 0L);
     parent.addChild(inodeDirectory);
     return inodeDirectory;
   }
@@ -443,8 +444,8 @@ public class TestFSPermissionChecker {
     PermissionStatus permStatus = PermissionStatus.createImmutable(owner, group,
       FsPermission.createImmutable(perm));
     INodeFile inodeFile = new INodeFile(HdfsConstants.GRANDFATHER_INODE_ID,
-      name.getBytes("UTF-8"), permStatus, 0L, 0L, null, REPLICATION,
-      PREFERRED_BLOCK_SIZE);
+        name.getBytes(StandardCharsets.UTF_8), permStatus, 0L, 0L, null,
+        REPLICATION, PREFERRED_BLOCK_SIZE);
     parent.addChild(inodeFile);
     return inodeFile;
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
index 2a9eda3ca5e7..8d4281c0385d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
@@ -316,7 +316,7 @@ public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest {
     String content = "testLengthParamLongerThanFile";
     FSDataOutputStream testFileOut = webhdfs.create(testFile);
     try {
-      testFileOut.write(content.getBytes("US-ASCII"));
+      testFileOut.write(content.getBytes(StandardCharsets.US_ASCII));
     } finally {
       IOUtils.closeStream(testFileOut);
     }
@@ -366,7 +366,7 @@ public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest {
     String content = "testOffsetPlusLengthParamsLongerThanFile";
     FSDataOutputStream testFileOut = webhdfs.create(testFile);
     try {
-      testFileOut.write(content.getBytes("US-ASCII"));
+      testFileOut.write(content.getBytes(StandardCharsets.US_ASCII));
     } finally {
       IOUtils.closeStream(testFileOut);
     }
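
These two hunks are the only places the patch reaches for US_ASCII instead of
UTF_8. StandardCharsets guarantees exactly six charsets on every JVM, so each
string-named lookup in this patch has a constant equivalent; a short sketch
enumerating them:

    import java.nio.charset.Charset;
    import java.nio.charset.StandardCharsets;

    public final class ConstantsDemo {
      public static void main(String[] args) {
        // The six charsets every Java platform implementation must support.
        for (Charset cs : new Charset[] {
            StandardCharsets.US_ASCII, StandardCharsets.ISO_8859_1,
            StandardCharsets.UTF_8, StandardCharsets.UTF_16,
            StandardCharsets.UTF_16BE, StandardCharsets.UTF_16LE}) {
          System.out.println(cs.name());
        }
      }
    }
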
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java
index 5368bc74812b..b836120a8dcb 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java
@@ -210,7 +210,7 @@ public abstract class RMCommunicator extends AbstractService
         || jobImpl.getInternalState() == JobStateInternal.ERROR) {
       finishState = FinalApplicationStatus.FAILED;
     }
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     for (String s : job.getDiagnostics()) {
       sb.append(s).append("\n");
     }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java
index 3ed65b94cdcf..982d364f32d6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java
@@ -120,7 +120,7 @@ public class JobInfo {
 
       List<String> diagnostics = job.getDiagnostics();
       if (diagnostics != null && !diagnostics.isEmpty()) {
-        StringBuffer b = new StringBuffer();
+        StringBuilder b = new StringBuilder();
         for (String diag : diagnostics) {
           b.append(diag);
         }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobResourceUploaderWithSharedCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobResourceUploaderWithSharedCache.java
index 5555043bcdfc..002ee712d2d5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobResourceUploaderWithSharedCache.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobResourceUploaderWithSharedCache.java
@@ -32,6 +32,7 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.jar.JarOutputStream;
 import java.util.zip.ZipEntry;
@@ -339,12 +340,12 @@ public class TestJobResourceUploaderWithSharedCache {
       IOException {
     FileOutputStream fos =
         new FileOutputStream(new File(p.toUri().getPath()));
-    JarOutputStream jos = new JarOutputStream(fos);
-    ZipEntry ze = new ZipEntry("distributed.jar.inside" + index);
-    jos.putNextEntry(ze);
-    jos.write(("inside the jar!" + index).getBytes());
-    jos.closeEntry();
-    jos.close();
+    try (JarOutputStream jos = new JarOutputStream(fos)) {
+      ZipEntry ze = new ZipEntry("distributed.jar.inside" + index);
+      jos.putNextEntry(ze);
+      jos.write(("inside the jar!" + index).getBytes());
+      jos.closeEntry();
+    }
     localFs.setPermission(p, new FsPermission("700"));
     return p;
   }
@@ -354,12 +355,12 @@ public class TestJobResourceUploaderWithSharedCache {
     Path archive = new Path(testRootDir, archiveFile);
     Path file = new Path(testRootDir, filename);
     DataOutputStream out = localFs.create(archive);
-    ZipOutputStream zos = new ZipOutputStream(out);
-    ZipEntry ze = new ZipEntry(file.toString());
-    zos.putNextEntry(ze);
-    zos.write(input.getBytes("UTF-8"));
-    zos.closeEntry();
-    zos.close();
+    try (ZipOutputStream zos = new ZipOutputStream(out)) {
+      ZipEntry ze = new ZipEntry(file.toString());
+      zos.putNextEntry(ze);
+      zos.write(input.getBytes(StandardCharsets.UTF_8));
+      zos.closeEntry();
+    }
     return archive;
   }
 }
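
These two hunks go beyond the charset change: the manual close() calls on
JarOutputStream and ZipOutputStream become try-with-resources, so the archive
is finished and the stream closed even if putNextEntry or write throws. A
self-contained sketch of the same conversion (file and entry names are
illustrative):

    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.jar.JarOutputStream;
    import java.util.zip.ZipEntry;

    public final class JarDemo {
      public static void main(String[] args) throws IOException {
        // try-with-resources closes the stream on both the normal and the
        // exceptional path; closing a JarOutputStream also finishes the archive.
        try (JarOutputStream jos =
            new JarOutputStream(new FileOutputStream("demo.jar"))) {
          jos.putNextEntry(new ZipEntry("inside.txt"));
          jos.write("inside the jar!".getBytes(StandardCharsets.UTF_8));
          jos.closeEntry();
        }
      }
    }
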
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
index ae68d74d8d4d..ec44dd77efbc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
@@ -92,7 +92,7 @@ public class TestConcatenatedCompressedInput {
 
   private static LineReader makeStream(String str) throws IOException {
     return new LineReader(new ByteArrayInputStream(
-            str.getBytes("UTF-8")), defaultConf);
+            str.getBytes(StandardCharsets.UTF_8)), defaultConf);
   }
 
   private static void writeFile(FileSystem fs, Path name,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java
index 4e14797a1619..f0b1df3eac18 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.mapreduce.lib.fieldsel.TestMRFieldSelection;
 import org.junit.Test;
 import static org.junit.Assert.assertEquals;
 
+import java.nio.charset.StandardCharsets;
 import java.text.NumberFormat;
 
 public class TestFieldSelection {
@@ -60,7 +61,7 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
     TestMRFieldSelection.constructInputOutputData(inputData,
       expectedOutput, numOfInputLines);
     FSDataOutputStream fileOut = fs.create(new Path(INPUT_DIR, inputFile));
-    fileOut.write(inputData.toString().getBytes("utf-8"));
+    fileOut.write(inputData.toString().getBytes(StandardCharsets.UTF_8));
     fileOut.close();
 
     System.out.println("inputData:");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
index 0991ae0b38b3..301cadb08be1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
@@ -30,6 +30,7 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 
@@ -134,9 +135,7 @@ public class TestKeyValueTextInputFormat {
     }
   }
   private LineReader makeStream(String str) throws IOException {
-    return new LineReader(new ByteArrayInputStream
-                                           (str.getBytes("UTF-8")), 
-                                           defaultConf);
+    return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), defaultConf);
   }
   @Test
   public void testUTF8() throws Exception {
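
The input-format tests statically import StandardCharsets.UTF_8, which keeps
the rewritten stream construction on a single line. A minimal sketch of the
same idiom (stream contents are illustrative):

    import static java.nio.charset.StandardCharsets.UTF_8;

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public final class StaticImportDemo {
      public static void main(String[] args) throws IOException {
        // With the static import, getBytes(UTF_8) stays short enough to keep
        // the whole expression on one line.
        try (InputStream in = new ByteArrayInputStream("key\tvalue\n".getBytes(UTF_8))) {
          System.out.println(in.available() + " bytes");
        }
      }
    }
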
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java
index 22d9a57b89a3..9a2576ec6649 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java
@@ -42,6 +42,7 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.*;
 
 public class TestTextInputFormat {
@@ -330,14 +331,10 @@ public class TestTextInputFormat {
   }
 
   private static LineReader makeStream(String str) throws IOException {
-    return new LineReader(new ByteArrayInputStream
-                                             (str.getBytes("UTF-8")), 
-                                           defaultConf);
+    return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), defaultConf);
   }
  private static LineReader makeStream(String str, int bufsz) throws IOException {
-    return new LineReader(new ByteArrayInputStream
-                                             (str.getBytes("UTF-8")),
-                                           bufsz);
+    return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), bufsz);
   }
 
   @Test (timeout=5000)
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java
index b839a2c3afe6..845139bf35b7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java
@@ -26,6 +26,7 @@ import org.junit.Test;
 import static org.junit.Assert.assertEquals;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.*;
 import java.text.NumberFormat;
 
@@ -55,8 +56,8 @@ public class TestAggregates {
     fs.mkdirs(INPUT_DIR);
     fs.delete(OUTPUT_DIR, true);
 
-    StringBuffer inputData = new StringBuffer();
-    StringBuffer expectedOutput = new StringBuffer();
+    StringBuilder inputData = new StringBuilder();
+    StringBuilder expectedOutput = new StringBuilder();
     expectedOutput.append("max\t19\n");
     expectedOutput.append("min\t1\n"); 
 
@@ -76,7 +77,7 @@ public class TestAggregates {
     expectedOutput.append("uniq_count\t15\n");
 
 
-    fileOut.write(inputData.toString().getBytes("utf-8"));
+    fileOut.write(inputData.toString().getBytes(StandardCharsets.UTF_8));
     fileOut.close();
 
     System.out.println("inputData:");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
index 3aac54e7159e..1c354d1348fd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
 import org.junit.Test;
 
+import java.nio.charset.StandardCharsets;
 import java.text.NumberFormat;
 
 import static org.junit.Assert.assertEquals;
@@ -61,8 +62,8 @@ public class TestMapReduceAggregates {
     fs.mkdirs(INPUT_DIR);
     fs.delete(OUTPUT_DIR, true);
 
-    StringBuffer inputData = new StringBuffer();
-    StringBuffer expectedOutput = new StringBuffer();
+    StringBuilder inputData = new StringBuilder();
+    StringBuilder expectedOutput = new StringBuilder();
     expectedOutput.append("max\t19\n");
     expectedOutput.append("min\t1\n"); 
 
@@ -82,7 +83,7 @@ public class TestMapReduceAggregates {
     expectedOutput.append("uniq_count\t15\n");
 
 
-    fileOut.write(inputData.toString().getBytes("utf-8"));
+    fileOut.write(inputData.toString().getBytes(StandardCharsets.UTF_8));
     fileOut.close();
 
     System.out.println("inputData:");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRKeyValueTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRKeyValueTextInputFormat.java
index 537d23c5f0ee..3784846002a6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRKeyValueTextInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRKeyValueTextInputFormat.java
@@ -45,6 +45,7 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.*;
 
 public class TestMRKeyValueTextInputFormat {
@@ -253,9 +254,7 @@ public class TestMRKeyValueTextInputFormat {
   }
 
   private LineReader makeStream(String str) throws IOException {
-    return new LineReader(new ByteArrayInputStream
-                                           (str.getBytes("UTF-8")), 
-                                           defaultConf);
+    return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), defaultConf);
   }
   
   @Test
diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java
index 95b6264ba603..ba3b0d7a5981 100644
--- a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java
@@ -14,6 +14,7 @@
 
 package org.apache.hadoop.maven.plugin.cmakebuilder;
 
+import java.nio.charset.StandardCharsets;
 import java.util.Locale;
 import org.apache.hadoop.maven.plugin.util.Exec;
 import org.apache.maven.execution.MavenSession;
@@ -165,7 +166,7 @@ public class TestMojo extends AbstractMojo {
                 testName + ".pstatus"));
     BufferedWriter out = null;
     try {
-      out = new BufferedWriter(new OutputStreamWriter(fos, "UTF8"));
+      out = new BufferedWriter(new OutputStreamWriter(fos, StandardCharsets.UTF_8));
       out.write(status + "\n");
     } finally {
       if (out != null) {
diff --git a/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java b/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
index c72a926b1317..0773c79bdf99 100644
--- a/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
+++ b/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
@@ -691,7 +691,7 @@ public class HadoopArchives implements Tool {
       if (value.isDir()) { 
         towrite = encodeName(relPath.toString())
                   + " dir " + propStr + " 0 0 ";
-        StringBuffer sbuff = new StringBuffer();
+        StringBuilder sbuff = new StringBuilder();
         sbuff.append(towrite);
         for (String child: value.children) {
           sbuff.append(encodeName(child) + " ");
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java
index 5f54673d7ae3..1aee53def1e2 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java
@@ -24,6 +24,7 @@ import java.io.UnsupportedEncodingException;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -98,12 +99,7 @@ public class SharedKeyCredentials {
   }
 
   private String computeHmac256(final String stringToSign) {
-    byte[] utf8Bytes;
-    try {
-      utf8Bytes = stringToSign.getBytes(AbfsHttpConstants.UTF_8);
-    } catch (final UnsupportedEncodingException e) {
-      throw new IllegalArgumentException(e);
-    }
+    byte[] utf8Bytes = stringToSign.getBytes(StandardCharsets.UTF_8);
     byte[] hmac;
     synchronized (this) {
       hmac = hmacSha256.doFinal(utf8Bytes);
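
The same simplification applies here: String.getBytes(Charset) cannot fail, so
computeHmac256 loses its try/catch and the AbfsHttpConstants.UTF_8 string
constant entirely. A standalone sketch of HMAC-SHA256 over UTF-8 bytes (key
and message are illustrative, not Azure's real signing inputs):

    import java.nio.charset.StandardCharsets;
    import java.security.InvalidKeyException;
    import java.security.NoSuchAlgorithmException;
    import java.util.Base64;
    import javax.crypto.Mac;
    import javax.crypto.spec.SecretKeySpec;

    public final class HmacDemo {
      public static void main(String[] args)
          throws NoSuchAlgorithmException, InvalidKeyException {
        byte[] key = "secret".getBytes(StandardCharsets.UTF_8);
        Mac mac = Mac.getInstance("HmacSHA256");
        mac.init(new SecretKeySpec(key, "HmacSHA256"));
        // getBytes(Charset) declares no checked exception, unlike
        // getBytes(String), so no catch block is required.
        byte[] sig = mac.doFinal("stringToSign".getBytes(StandardCharsets.UTF_8));
        System.out.println(Base64.getEncoder().encodeToString(sig));
      }
    }
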
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java
index 5735423aaf92..c9f89e664334 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java
@@ -390,7 +390,7 @@ public class ITestAzureBlobFileSystemDelegationSAS extends AbstractAbfsIntegrati
     fs.create(reqPath).close();
 
     final String propertyName = "user.mime_type";
-    final byte[] propertyValue = "text/plain".getBytes("utf-8");
+    final byte[] propertyValue = "text/plain".getBytes(StandardCharsets.UTF_8);
     fs.setXAttr(reqPath, propertyName, propertyValue);
 
     assertArrayEquals(propertyValue, fs.getXAttr(reqPath, propertyName));
diff --git a/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/ITestProvidedImplementation.java b/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/ITestProvidedImplementation.java
index cf86dd7dcbac..361243fd6968 100644
--- a/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/ITestProvidedImplementation.java
+++ b/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/ITestProvidedImplementation.java
@@ -28,6 +28,7 @@ import java.net.URI;
 import java.nio.ByteBuffer;
 import java.nio.channels.Channels;
 import java.nio.channels.ReadableByteChannel;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -180,7 +181,7 @@ public class ITestProvidedImplementation {
           LOG.info("Creating " + newFile.toString());
           newFile.createNewFile();
           Writer writer = new OutputStreamWriter(
-              new FileOutputStream(newFile.getAbsolutePath()), "utf-8");
+              new FileOutputStream(newFile.getAbsolutePath()), StandardCharsets.UTF_8);
           for(int j=0; j < baseFileLen*i; j++) {
             writer.write("0");
           }
diff --git a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java
index 9ecd9e8e5da8..a4a2a3538ced 100644
--- a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java
+++ b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java
@@ -222,7 +222,7 @@ class ExecutionSummarizer implements StatListener<JobStats> {
   // Gets the stringified version of DataStatistics
   static String stringifyDataStatistics(DataStatistics stats) {
     if (stats != null) {
-      StringBuffer buffer = new StringBuffer();
+      StringBuilder buffer = new StringBuilder();
       String compressionStatus = stats.isDataCompressed() 
                                  ? "Compressed" 
                                  : "Uncompressed";
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamUtil.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamUtil.java
index a6983e1c6c30..cfa6e77b0b03 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamUtil.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamUtil.java
@@ -128,7 +128,7 @@ public class StreamUtil {
   static final String regexpSpecials = "[]()?*+|.!^-\\~@";
 
   public static String regexpEscape(String plain) {
-    StringBuffer buf = new StringBuffer();
+    StringBuilder buf = new StringBuilder();
     char[] ch = plain.toCharArray();
     int csup = ch.length;
     for (int c = 0; c < csup; c++) {
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamDataProtocol.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamDataProtocol.java
index 14f0f9607e51..23bb36ba0e2c 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamDataProtocol.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamDataProtocol.java
@@ -19,11 +19,8 @@
 package org.apache.hadoop.streaming;
 
 import java.io.*;
-import java.util.*;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
+import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner;
 
 import org.junit.Test;
@@ -59,7 +56,7 @@ public class TestStreamDataProtocol
   {
     DataOutputStream out = new DataOutputStream(
                                                new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
-    out.write(input.getBytes("UTF-8"));
+    out.write(input.getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java
index 766402184ceb..4bb20c762140 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java
@@ -19,11 +19,8 @@
 package org.apache.hadoop.streaming;
 
 import java.io.*;
-import java.util.*;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
+import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
 
 import static org.junit.Assert.*;
 import org.junit.Test;
@@ -56,7 +53,7 @@ public class TestStreamReduceNone
   {
     DataOutputStream out = new DataOutputStream(
                                                new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
-    out.write(input.getBytes("UTF-8"));
+    out.write(input.getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java
index 53009dbbabc6..077b02c6cb39 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.streaming;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
 /**
  * This class tests StreamXmlRecordReader
@@ -44,9 +45,9 @@ public class TestStreamXmlRecordReader extends TestStreaming {
     FileOutputStream out = new FileOutputStream(INPUT_FILE.getAbsoluteFile());
     String dummyXmlStartTag = "<PATTERN>\n";
     String dummyXmlEndTag = "</PATTERN>\n";
-    out.write(dummyXmlStartTag.getBytes("UTF-8"));
-    out.write(input.getBytes("UTF-8"));
-    out.write(dummyXmlEndTag.getBytes("UTF-8"));
+    out.write(dummyXmlStartTag.getBytes(StandardCharsets.UTF_8));
+    out.write(input.getBytes(StandardCharsets.UTF_8));
+    out.write(dummyXmlEndTag.getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreaming.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreaming.java
index 4f39120a1629..5139cf617da4 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreaming.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreaming.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.streaming;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
@@ -120,7 +121,7 @@ public class TestStreaming
   {
     DataOutputStream out = getFileSystem().create(new Path(
       INPUT_FILE.getPath()));
-    out.write(getInputData().getBytes("UTF-8"));
+    out.write(getInputData().getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingKeyValue.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingKeyValue.java
index c21cb159f4f3..ff95bd49cea3 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingKeyValue.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingKeyValue.java
@@ -22,6 +22,7 @@ import org.junit.Test;
 import static org.junit.Assert.*;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -68,7 +69,7 @@ public class TestStreamingKeyValue
   {
     DataOutputStream out = new DataOutputStream(
        new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
-    out.write(input.getBytes("UTF-8"));
+    out.write(input.getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingSeparator.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingSeparator.java
index f8167bbdd7a8..66ee174be759 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingSeparator.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingSeparator.java
@@ -22,11 +22,8 @@ import org.junit.Test;
 import static org.junit.Assert.*;
 
 import java.io.*;
-import java.util.*;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
+import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
 
 /**
  * This class tests hadoopStreaming with customized separator in MapReduce local mode.
@@ -64,7 +61,7 @@ public class TestStreamingSeparator
   {
     DataOutputStream out = new DataOutputStream(
                                                 new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
-    out.write(input.getBytes("UTF-8"));
+    out.write(input.getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestTypedBytesStreaming.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
index 05a050cac831..02daa1894c9d 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
@@ -22,6 +22,7 @@ import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
@@ -48,7 +49,7 @@ public class TestTypedBytesStreaming {
 
   protected void createInput() throws IOException {
     DataOutputStream out = new DataOutputStream(new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
-    out.write(input.getBytes("UTF-8"));
+    out.write(input.getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java
index 5bf2fe52d447..270a1f7fd914 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java
@@ -101,9 +101,9 @@ public class TestStreamXmlRecordReader {
     FileOutputStream out = new FileOutputStream(INPUT_FILE.getAbsoluteFile());
     String dummyXmlStartTag = "<PATTERN>\n";
     String dummyXmlEndTag = "</PATTERN>\n";
-    out.write(dummyXmlStartTag.getBytes("UTF-8"));
-    out.write(input.getBytes("UTF-8"));
-    out.write(dummyXmlEndTag.getBytes("UTF-8"));
+    out.write(dummyXmlStartTag.getBytes(StandardCharsets.UTF_8));
+    out.write(input.getBytes(StandardCharsets.UTF_8));
+    out.write(dummyXmlEndTag.getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestTopCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestTopCLI.java
index 63ebffaca440..6aaab2f18fe1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestTopCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestTopCLI.java
@@ -25,6 +25,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -157,7 +158,7 @@ public class TestTopCLI {
       System.setErr(out);
       topcli.showTopScreen();
       out.flush();
-      actual = outStream.toString("UTF-8");
+      actual = outStream.toString(StandardCharsets.UTF_8.name());
     }
 
     String expected = "NodeManager(s)"
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
index 57ec83fc6149..25f7747df881 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
@@ -38,6 +38,7 @@ import java.io.OutputStreamWriter;
 import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.text.DecimalFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -364,7 +365,7 @@ public class TestYarnCLI {
     verify(client).getContainers(attemptId);
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     OutputStreamWriter stream =
-        new OutputStreamWriter(baos, "UTF-8");
+        new OutputStreamWriter(baos, StandardCharsets.UTF_8);
     PrintWriter pw = new PrintWriter(stream);
     pw.println("Total number of containers :3");
     pw.printf(ApplicationCLI.CONTAINER_PATTERN, "Container-Id", "Start Time",
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
index 5a4beca990c3..007721f2eca6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
@@ -30,6 +30,7 @@ import java.io.StringWriter;
 import java.io.UnsupportedEncodingException;
 import java.io.Writer;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.nio.file.StandardOpenOption;
@@ -159,7 +160,7 @@ public class TestAggregatedLogFormat {
 
     File outputFile = new File(new File(srcFilePath.toString()), fileName);
     FileOutputStream os = new FileOutputStream(outputFile);
-    final OutputStreamWriter osw = new OutputStreamWriter(os, "UTF8");
+    final OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8);
     final int ch = filler;
 
     UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
@@ -473,7 +474,7 @@ public class TestAggregatedLogFormat {
     }
     File outputFile = new File(new File(srcFilePath.toString()), fileName);
     FileOutputStream os = new FileOutputStream(outputFile);
-    OutputStreamWriter osw = new OutputStreamWriter(os, "UTF8");
+    OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8);
     return osw;
   }
 }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
index eb3db5eee190..355398e9f728 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
@@ -24,6 +24,7 @@ import java.io.FileInputStream;
 import java.io.InputStream;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -227,6 +228,6 @@ public class TestApplicationHistoryServer {
     while ((read = input.read(buffer)) >= 0) {
       data.write(buffer, 0, read);
     }
-    return new String(data.toByteArray(), "UTF-8");
+    return new String(data.toByteArray(), StandardCharsets.UTF_8);
   }
 }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java
index 9c4c2c72e5c5..86d78f2fc351 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.yarn.server.nodemanager.amrmproxy;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -585,7 +586,7 @@ public class FederationInterceptor extends AbstractRequestInterceptor {
         // entry for subClusterId -> UAM AMRMTokenIdentifier
         String scId = key.substring(NMSS_SECONDARY_SC_PREFIX.length());
         Token<AMRMTokenIdentifier> aMRMTokenIdentifier = new Token<>();
-        aMRMTokenIdentifier.decodeFromUrlString(new String(value, STRING_TO_BYTE_FORMAT));
+        aMRMTokenIdentifier.decodeFromUrlString(new String(value, StandardCharsets.UTF_8));
         uamMap.put(scId, aMRMTokenIdentifier);
         LOG.debug("Recovered UAM in {} from NMSS.", scId);
       }
@@ -1345,7 +1346,7 @@ public class FederationInterceptor extends AbstractRequestInterceptor {
           } else if (getNMStateStore() != null) {
             getNMStateStore().storeAMRMProxyAppContextEntry(attemptId,
                 NMSS_SECONDARY_SC_PREFIX + subClusterId,
-                token.encodeToUrlString().getBytes(STRING_TO_BYTE_FORMAT));
+                token.encodeToUrlString().getBytes(StandardCharsets.UTF_8));
           }
         } catch (Throwable e) {
           LOG.error("Failed to persist UAM token from {} Application {}",
@@ -1884,7 +1885,7 @@ public class FederationInterceptor extends AbstractRequestInterceptor {
           try {
             getNMStateStore().storeAMRMProxyAppContextEntry(attemptId,
                 NMSS_SECONDARY_SC_PREFIX + subClusterId.getId(),
-                newToken.encodeToUrlString().getBytes(STRING_TO_BYTE_FORMAT));
+                newToken.encodeToUrlString().getBytes(StandardCharsets.UTF_8));
           } catch (IOException e) {
             LOG.error("Error storing UAM token as AMRMProxy "
                 + "context entry in NMSS for {}.", attemptId, e);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsBlkioResourceHandlerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsBlkioResourceHandlerImpl.java
index 20d9d5ccf9a3..865d2b19fd02 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsBlkioResourceHandlerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsBlkioResourceHandlerImpl.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileg
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.ArrayList;
@@ -73,7 +74,7 @@ public class CGroupsBlkioResourceHandlerImpl implements DiskResourceHandler {
     // are using the CFQ scheduler. If they aren't print a warning
     try {
       byte[] contents = Files.readAllBytes(Paths.get(PARTITIONS_FILE));
-      data = new String(contents, "UTF-8").trim();
+      data = new String(contents, StandardCharsets.UTF_8).trim();
     } catch (IOException e) {
       String msg = "Couldn't read " + PARTITIONS_FILE +
           "; can't determine disk scheduler type";
@@ -96,7 +97,7 @@ public class CGroupsBlkioResourceHandlerImpl implements DiskResourceHandler {
             if (schedulerFile.exists()) {
               try {
                 byte[] contents = Files.readAllBytes(Paths.get(schedulerPath));
-                String schedulerString = new String(contents, "UTF-8").trim();
+                String schedulerString = new String(contents, StandardCharsets.UTF_8).trim();
                 if (!schedulerString.contains("[cfq]")) {
                   LOG.warn("Device " + partition + " does not use the CFQ"
                       + " scheduler; disk isolation using "
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsCpuResourceHandlerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsCpuResourceHandlerImpl.java
index 4ce1375f8e10..f724b8803d5f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsCpuResourceHandlerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsCpuResourceHandlerImpl.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -132,7 +133,7 @@ public class CGroupsCpuResourceHandlerImpl implements CpuResourceHandler {
     File quotaFile = new File(path,
         CPU.getName() + "." + CGroupsHandler.CGROUP_CPU_QUOTA_US);
     if (quotaFile.exists()) {
-      String contents = FileUtils.readFileToString(quotaFile, "UTF-8");
+      String contents = FileUtils.readFileToString(quotaFile, StandardCharsets.UTF_8);
       int quotaUS = Integer.parseInt(contents.trim());
       if (quotaUS != -1) {
         return true;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsHandlerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsHandlerImpl.java
index 03038b86fc46..40149abd40de 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsHandlerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsHandlerImpl.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.yarn.util.Clock;
 import org.apache.hadoop.yarn.util.SystemClock;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.Arrays;
@@ -216,7 +217,7 @@ class CGroupsHandlerImpl implements CGroupsHandler {
 
     try {
       FileInputStream fis = new FileInputStream(new File(mtab));
-      in = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
+      in = new BufferedReader(new InputStreamReader(fis, StandardCharsets.UTF_8));
 
       for (String str = in.readLine(); str != null;
            str = in.readLine()) {
@@ -474,7 +475,7 @@ class CGroupsHandlerImpl implements CGroupsHandler {
     if (LOG.isDebugEnabled()) {
       try (BufferedReader inl =
           new BufferedReader(new InputStreamReader(new FileInputStream(cgf
-              + "/tasks"), "UTF-8"))) {
+              + "/tasks"), StandardCharsets.UTF_8))) {
         str = inl.readLine();
         if (str != null) {
           LOG.debug("First line in cgroup tasks file: {} {}", cgf, str);
@@ -559,7 +560,7 @@ class CGroupsHandlerImpl implements CGroupsHandler {
 
     try {
       File file = new File(cGroupParamPath);
-      Writer w = new OutputStreamWriter(new FileOutputStream(file), "UTF-8");
+      Writer w = new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8);
       pw = new PrintWriter(w);
       pw.write(value);
     } catch (IOException e) {
@@ -595,7 +596,7 @@ class CGroupsHandlerImpl implements CGroupsHandler {
 
     try {
       byte[] contents = Files.readAllBytes(Paths.get(cGroupParamPath));
-      return new String(contents, "UTF-8").trim();
+      return new String(contents, StandardCharsets.UTF_8).trim();
     } catch (IOException e) {
       throw new ResourceHandlerException(
           "Unable to read from " + cGroupParamPath);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java
index b171ed00e3eb..e5abca282669 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java
@@ -31,6 +31,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.HashMap;
@@ -619,9 +620,9 @@ import java.util.regex.Pattern;
             File(tmpDirPath));
 
         try (
-            Writer writer = new OutputStreamWriter(new FileOutputStream(tcCmds),
-                "UTF-8");
-            PrintWriter printWriter = new PrintWriter(writer)) {
+                Writer writer = new OutputStreamWriter(new FileOutputStream(tcCmds),
+                StandardCharsets.UTF_8);
+                PrintWriter printWriter = new PrintWriter(writer)) {
           for (String command : commands) {
             printWriter.println(command);
           }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DefaultLinuxContainerRuntime.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DefaultLinuxContainerRuntime.java
index 20e281b1214c..168f033c24cb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DefaultLinuxContainerRuntime.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DefaultLinuxContainerRuntime.java
@@ -48,6 +48,7 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -257,9 +258,9 @@ public class DefaultLinuxContainerRuntime implements LinuxContainerRuntime {
       File commandFile = File.createTempFile(TMP_FILE_PREFIX + filePrefix,
           TMP_FILE_SUFFIX, cmdDir);
       try (
-          Writer writer = new OutputStreamWriter(
-              new FileOutputStream(commandFile.toString()), "UTF-8");
-          PrintWriter printWriter = new PrintWriter(writer);
+              Writer writer = new OutputStreamWriter(
+              new FileOutputStream(commandFile.toString()), StandardCharsets.UTF_8);
+              PrintWriter printWriter = new PrintWriter(writer);
       ) {
         Map<String, List<String>> cmd = new HashMap<String, List<String>>();
         // command = exec
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/DockerClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/DockerClient.java
index 6cad26e4c04d..b97890078c23 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/DockerClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/DockerClient.java
@@ -38,6 +38,7 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
 
@@ -55,9 +56,9 @@ public final class DockerClient {
     File dockerEnvFile = File.createTempFile(TMP_FILE_PREFIX + filePrefix,
         TMP_ENV_FILE_SUFFIX, cmdDir);
     try (
-        Writer envWriter = new OutputStreamWriter(
-            new FileOutputStream(dockerEnvFile), "UTF-8");
-        PrintWriter envPrintWriter = new PrintWriter(envWriter);
+            Writer envWriter = new OutputStreamWriter(
+            new FileOutputStream(dockerEnvFile), StandardCharsets.UTF_8);
+            PrintWriter envPrintWriter = new PrintWriter(envWriter);
     ) {
       for (Map.Entry<String, String> entry : cmd.getEnv()
           .entrySet()) {
@@ -94,7 +95,7 @@ public final class DockerClient {
           TMP_FILE_SUFFIX, cmdDir);
       try (
         Writer writer = new OutputStreamWriter(
-            new FileOutputStream(dockerCommandFile.toString()), "UTF-8");
+              new FileOutputStream(dockerCommandFile.toString()), StandardCharsets.UTF_8);
         PrintWriter printWriter = new PrintWriter(writer);
       ) {
         printWriter.println("[docker-command-execution]");
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/gpu/NvidiaDockerV1CommandPlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/gpu/NvidiaDockerV1CommandPlugin.java
index 252e9b29b8fd..60f5313a90cf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/gpu/NvidiaDockerV1CommandPlugin.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/gpu/NvidiaDockerV1CommandPlugin.java
@@ -38,6 +38,7 @@ import java.io.Serializable;
 import java.io.StringWriter;
 import java.net.URL;
 import java.net.URLConnection;
+import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -104,7 +105,7 @@ public class NvidiaDockerV1CommandPlugin implements DockerCommandPlugin {
       uc.setRequestProperty("X-Requested-With", "Curl");
 
       StringWriter writer = new StringWriter();
-      IOUtils.copy(uc.getInputStream(), writer, "utf-8");
+      IOUtils.copy(uc.getInputStream(), writer, StandardCharsets.UTF_8);
       cliOptions = writer.toString();
 
       LOG.info("Additional docker CLI options from plugin to run GPU "
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java
index b97549305adf..0dde3b1576f2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java
@@ -27,6 +27,7 @@ import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -220,7 +221,7 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
     PrintWriter pw = null;
     try {
       File file = new File(path + "/" + param);
-      Writer w = new OutputStreamWriter(new FileOutputStream(file), "UTF-8");
+      Writer w = new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8);
       pw = new PrintWriter(w);
       pw.write(value);
     } catch (IOException e) {
@@ -249,7 +250,7 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
     if (LOG.isDebugEnabled()) {
       try (BufferedReader inl =
             new BufferedReader(new InputStreamReader(new FileInputStream(cgf
-              + "/tasks"), "UTF-8"))) {
+              + "/tasks"), StandardCharsets.UTF_8))) {
         str = inl.readLine();
         if (str != null) {
           LOG.debug("First line in cgroup tasks file: {} {}", cgf, str);
@@ -403,7 +404,7 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
 
     try {
       FileInputStream fis = new FileInputStream(new File(getMtabFileName()));
-      in = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
+      in = new BufferedReader(new InputStreamReader(fis, StandardCharsets.UTF_8));
 
       for (String str = in.readLine(); str != null;
           str = in.readLine()) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java
index c492ee4ae745..318d7740727f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java
@@ -22,13 +22,14 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.util.ConverterUtils;
 
 /**
  * Helper functionality to read the pid from a file.
@@ -57,7 +58,7 @@ public class ProcessIdFileReader {
       File file = new File(path.toString());
       if (file.exists()) {
         FileInputStream fis = new FileInputStream(file);
-        bufReader = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
+        bufReader = new BufferedReader(new InputStreamReader(fis, StandardCharsets.UTF_8));
 
         while (true) {
           String line = bufReader.readLine();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java
index ff47b606c740..d80165237798 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java
@@ -346,7 +346,7 @@ public class FSConfigToCSConfigConverter {
       if (!rulesToFile) {
         String json =
             ((ByteArrayOutputStream)mappingRulesOutputStream)
-            .toString(StandardCharsets.UTF_8.displayName());
+            .toString(StandardCharsets.UTF_8.name());
         capacitySchedulerConfig.setMappingRuleJson(json);
       }
     } else {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java
index a51bd2afec67..2cf5c9b07339 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java
@@ -817,7 +817,7 @@ public class TestRMWebServicesAppsModification extends JerseyTestBase {
     HashMap<String, String> tokens = new HashMap<>();
     HashMap<String, String> secrets = new HashMap<>();
     secrets.put("secret1", Base64.encodeBase64String(
-        "mysecret".getBytes("UTF8")));
+        "mysecret".getBytes(StandardCharsets.UTF_8)));
     credentials.setSecrets(secrets);
     credentials.setTokens(tokens);
     ApplicationSubmissionContextInfo appInfo = new ApplicationSubmissionContextInfo();
@@ -840,7 +840,7 @@ public class TestRMWebServicesAppsModification extends JerseyTestBase {
     appInfo.getContainerLaunchContextInfo().setEnvironment(environment);
     appInfo.getContainerLaunchContextInfo().setAcls(acls);
     appInfo.getContainerLaunchContextInfo().getAuxillaryServiceData()
-      .put("test", Base64.encodeBase64URLSafeString("value12".getBytes("UTF8")));
+      .put("test", Base64.encodeBase64URLSafeString("value12".getBytes(StandardCharsets.UTF_8)));
     appInfo.getContainerLaunchContextInfo().setCredentials(credentials);
     appInfo.getResource().setMemory(1024);
     appInfo.getResource().setvCores(1);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java
index ea286568340c..678e7a7d2dbf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java
@@ -31,6 +31,7 @@ import java.io.OutputStream;
 import java.io.StringWriter;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.concurrent.Callable;
@@ -250,7 +251,7 @@ public class TestRMWebServicesDelegationTokenAuthentication {
       InputStream errorStream = conn.getErrorStream();
       String error = "";
       BufferedReader reader = null;
-      reader = new BufferedReader(new InputStreamReader(errorStream, "UTF8"));
+      reader = new BufferedReader(new InputStreamReader(errorStream, StandardCharsets.UTF_8));
       for (String line; (line = reader.readLine()) != null;) {
         error += line;
       }
@@ -356,7 +357,7 @@ public class TestRMWebServicesDelegationTokenAuthentication {
         assertEquals(Status.OK.getStatusCode(), conn.getResponseCode());
         BufferedReader reader = null;
         try {
-          reader = new BufferedReader(new InputStreamReader(response, "UTF8"));
+          reader = new BufferedReader(new InputStreamReader(response, StandardCharsets.UTF_8));
           for (String line; (line = reader.readLine()) != null;) {
             JSONObject obj = new JSONObject(line);
             if (obj.has("token")) {
@@ -432,7 +433,7 @@ public class TestRMWebServicesDelegationTokenAuthentication {
         InputStream response = conn.getInputStream();
         assertEquals(Status.OK.getStatusCode(), conn.getResponseCode());
         try (BufferedReader reader = new BufferedReader(new InputStreamReader(
-            response, "UTF8"))) {
+            response, StandardCharsets.UTF_8))) {
           String line;
           while ((line = reader.readLine()) != null) {
             JSONObject obj = new JSONObject(line);
@@ -490,7 +491,7 @@ public class TestRMWebServicesDelegationTokenAuthentication {
       conn.setRequestProperty("Content-Type", contentType + ";charset=UTF8");
       if (body != null && !body.isEmpty()) {
         OutputStream stream = conn.getOutputStream();
-        stream.write(body.getBytes("UTF8"));
+        stream.write(body.getBytes(StandardCharsets.UTF_8));
         stream.close();
       }
     }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesHttpStaticUserPermissions.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesHttpStaticUserPermissions.java
index cef32f4c2bfe..dbd0e425b3dd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesHttpStaticUserPermissions.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesHttpStaticUserPermissions.java
@@ -28,6 +28,7 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.HashMap;
 
@@ -181,7 +182,7 @@ public class TestRMWebServicesHttpStaticUserPermissions {
         InputStream errorStream = conn.getErrorStream();
         String error = "";
         BufferedReader reader = new BufferedReader(
-            new InputStreamReader(errorStream, "UTF8"));
+            new InputStreamReader(errorStream, StandardCharsets.UTF_8));
         for (String line; (line = reader.readLine()) != null;) {
           error += line;
         }
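
For reviewers skimming the hunks above: every change in this patch is the same
mechanical substitution. The sketch below (hypothetical class and method names,
not code from this patch) shows the before/after shape and why the
StandardCharsets form is preferred:

    import java.io.ByteArrayInputStream;
    import java.io.InputStreamReader;
    import java.io.Reader;
    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    public class CharsetMigrationSketch {

      // Before: the charset is passed by name. The string is unchecked at
      // compile time, and the checked UnsupportedEncodingException must be
      // declared or caught even though "UTF-8" can never be missing.
      static byte[] encodeBefore(String s) throws UnsupportedEncodingException {
        return s.getBytes("UTF-8");
      }

      // After: the charset is passed as a constant. No checked exception,
      // no runtime charset-name lookup, and a typo is a compile error.
      static byte[] encodeAfter(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
      }

      // The same swap applies to decoding and to stream wrappers.
      static String decodeAfter(byte[] bytes) {
        return new String(bytes, StandardCharsets.UTF_8);
      }

      static Reader readerAfter(byte[] bytes) {
        return new InputStreamReader(new ByteArrayInputStream(bytes),
            StandardCharsets.UTF_8);
      }
    }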
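
Two hunks (TestTopCLI and FSConfigToCSConfigConverter) keep a string-based
call, passing StandardCharsets.UTF_8.name() rather than the constant. That is
deliberate: ByteArrayOutputStream only gained a toString(Charset) overload in
Java 10, so on an older language level the charset-name overload is the only
option. A minimal illustration (hypothetical class name, assuming a Java 8
baseline):

    import java.io.ByteArrayOutputStream;
    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    public class ToStringOverloadSketch {
      public static void main(String[] args) throws UnsupportedEncodingException {
        byte[] bytes = "hello".getBytes(StandardCharsets.UTF_8);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        baos.write(bytes, 0, bytes.length);

        // Java 8-compatible: this overload takes a charset *name*, hence
        // .name(). It still declares UnsupportedEncodingException, but the
        // name of a StandardCharsets constant always resolves.
        String viaName = baos.toString(StandardCharsets.UTF_8.name());

        // Only available from Java 10 onward:
        // String viaCharset = baos.toString(StandardCharsets.UTF_8);

        System.out.println(viaName);
      }
    }

The FSConfigToCSConfigConverter hunk also swaps displayName() for name():
name() returns the canonical registry name, while displayName() is in
principle locale-dependent, so name() is the safer key to feed back into a
by-name API.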


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

