HADOOP-19041. Use StandardCharsets in more places (#6449)

PJ Fanning · 1 year ago · commit 97c5a6efba
62 changed files with 166 additions and 151 deletions
  1. +3 -8
      hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
  2. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
  3. +4 -3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
  4. +1 -1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
  5. +2 -1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
  6. +1 -1
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java
  7. +3 -2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java
  8. +2 -1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
  9. +2 -1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
  10. +2 -1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
  11. +2 -1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java
  12. +3 -2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java
  13. +7 -7
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java
  14. +2 -1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
  15. +2 -1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
  16. +4 -4
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java
  17. +2 -1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java
  18. +4 -3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSPermissionChecker.java
  19. +2 -2
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
  20. +1 -1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java
  21. +1 -1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java
  22. +13 -12
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobResourceUploaderWithSharedCache.java
  23. +1 -1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
  24. +2 -1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java
  25. +2 -3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
  26. +3 -6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java
  27. +4 -3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java
  28. +4 -3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
  29. +2 -3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRKeyValueTextInputFormat.java
  30. +2 -1
      hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java
  31. +1 -1
      hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
  32. +2 -6
      hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java
  33. +1 -1
      hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java
  34. +2 -1
      hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/ITestProvidedImplementation.java
  35. +1 -1
      hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java
  36. +1 -1
      hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamUtil.java
  37. +2 -5
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamDataProtocol.java
  38. +2 -5
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java
  39. +4 -3
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java
  40. +2 -1
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreaming.java
  41. +2 -1
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingKeyValue.java
  42. +2 -5
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingSeparator.java
  43. +2 -1
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
  44. +3 -3
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java
  45. +2 -1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestTopCLI.java
  46. +2 -1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
  47. +3 -2
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
  48. +2 -1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
  49. +4 -3
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java
  50. +3 -2
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsBlkioResourceHandlerImpl.java
  51. +2 -1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsCpuResourceHandlerImpl.java
  52. +5 -4
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsHandlerImpl.java
  53. +4 -3
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java
  54. +4 -3
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DefaultLinuxContainerRuntime.java
  55. +5 -4
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/DockerClient.java
  56. +2 -1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/gpu/NvidiaDockerV1CommandPlugin.java
  57. +4 -3
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java
  58. +3 -2
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java
  59. +1 -1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java
  60. +2 -2
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java
  61. +5 -4
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java
  62. +2 -1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesHttpStaticUserPermissions.java

+ 3 - 8
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java

@@ -21,12 +21,11 @@ import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import java.io.File;
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.lang.reflect.InvocationTargetException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.nio.ByteBuffer;
-import java.nio.charset.IllegalCharsetNameException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -426,12 +425,8 @@ public class KerberosUtil {
     }
 
     String getAsString() {
-      try {
-        return new String(bb.array(), bb.arrayOffset() + bb.position(),
-            bb.remaining(), "UTF-8");
-      } catch (UnsupportedEncodingException e) {
-        throw new IllegalCharsetNameException("UTF-8"); // won't happen.
-      }
+      return new String(bb.array(), bb.arrayOffset() + bb.position(),
+          bb.remaining(), StandardCharsets.UTF_8);
     }
 
     @Override
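
The hunk above is the template for most of this commit: the String(byte[], String) and String.getBytes(String) overloads look up the charset by name at runtime and therefore declare the checked UnsupportedEncodingException, while the Charset-typed overloads, used here via java.nio.charset.StandardCharsets (Java 7+), cannot fail for a charset every JVM must support. A minimal standalone sketch of the before/after shapes (not part of the commit):

    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    public class CharsetOverloadSketch {
      // Name-based overload: the compiler demands a handler for an exception
      // that can never fire here, since UTF-8 support is mandatory.
      static String decodeByName(byte[] bytes) {
        try {
          return new String(bytes, "UTF-8");
        } catch (UnsupportedEncodingException e) {
          throw new AssertionError("UTF-8 is always present", e); // won't happen
        }
      }

      // Charset-based overload: no runtime lookup, no checked exception.
      static String decodeByCharset(byte[] bytes) {
        return new String(bytes, StandardCharsets.UTF_8);
      }

      public static void main(String[] args) {
        byte[] data = "caf\u00e9".getBytes(StandardCharsets.UTF_8);
        System.out.println(decodeByName(data).equals(decodeByCharset(data))); // true
      }
    }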

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

@@ -3565,7 +3565,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @throws IOException raised on errors performing I/O.
    */
   public void writeXml(OutputStream out) throws IOException {
-    writeXml(new OutputStreamWriter(out, "UTF-8"));
+    writeXml(new OutputStreamWriter(out, StandardCharsets.UTF_8));
   }
 
   public void writeXml(Writer out) throws IOException {

+ 4 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.fs;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.commons.codec.DecoderException;
 import org.apache.commons.codec.binary.Base64;
@@ -76,7 +77,7 @@ public enum XAttrCodec {
         String en = value.substring(0, 2);
         if (value.startsWith("\"") && value.endsWith("\"")) {
           value = value.substring(1, value.length()-1);
-          result = value.getBytes("utf-8");
+          result = value.getBytes(StandardCharsets.UTF_8);
         } else if (en.equalsIgnoreCase(HEX_PREFIX)) {
           value = value.substring(2, value.length());
           try {
@@ -90,7 +91,7 @@ public enum XAttrCodec {
         }
       }
       if (result == null) {
-        result = value.getBytes("utf-8");
+        result = value.getBytes(StandardCharsets.UTF_8);
       }
     }
     return result;
@@ -114,7 +115,7 @@ public enum XAttrCodec {
     } else if (encoding == BASE64) {
       return BASE64_PREFIX + base64.encodeToString(value);
     } else {
-      return "\"" + new String(value, "utf-8") + "\"";
+      return "\"" + new String(value, StandardCharsets.UTF_8) + "\"";
     }
   }
 }

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java

@@ -387,7 +387,7 @@ public class TestConfiguration {
       String name = "multi_byte_\u611b_name";
       String value = "multi_byte_\u0641_value";
       out = new BufferedWriter(new OutputStreamWriter(
-        new FileOutputStream(CONFIG_MULTI_BYTE), "UTF-8"));
+        new FileOutputStream(CONFIG_MULTI_BYTE), StandardCharsets.UTF_8));
       startConfig();
       declareProperty(name, value, value);
       endConfig();

+ 2 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java

@@ -32,6 +32,7 @@ import static org.apache.hadoop.fs.FileSystemTestHelper.*;
 
 import java.io.*;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.EnumSet;
@@ -673,7 +674,7 @@ public class TestLocalFileSystem {
           fileSys.createFile(path).recursive();
       FSDataOutputStream out = builder.build();
       String content = "Create with a generic type of createFile!";
-      byte[] contentOrigin = content.getBytes("UTF8");
+      byte[] contentOrigin = content.getBytes(StandardCharsets.UTF_8);
       out.write(contentOrigin);
       out.close();
 

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java

@@ -106,7 +106,7 @@ public class HttpFSUtils {
     if (multiValuedParams != null) {
       for (Map.Entry<String, List<String>> multiValuedEntry : 
         multiValuedParams.entrySet()) {
-        String name = URLEncoder.encode(multiValuedEntry.getKey(), "UTF8");
+        String name = URLEncoder.encode(multiValuedEntry.getKey(), "UTF-8");
         List<String> values = multiValuedEntry.getValue();
         for (String value : values) {
           sb.append(separator).append(name).append("=").
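
Note that this hunk only normalizes the alias "UTF8" to the canonical name "UTF-8" instead of switching to StandardCharsets: on Java 8, URLEncoder.encode(String, String) is the only charset-aware overload, and it takes the charset by name. Java 10 added a Charset-typed overload, so a hypothetical follow-up (not part of this commit) could drop the string name entirely. A sketch of both forms:

    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;

    public class UrlEncoderSketch {
      public static void main(String[] args) throws Exception {
        String key = "multi valued&key";
        // Java 8-compatible form used by the commit: charset passed by name,
        // declaring the checked UnsupportedEncodingException.
        String byName = URLEncoder.encode(key, "UTF-8");
        // Java 10+ overload: Charset-typed, no checked exception.
        String byCharset = URLEncoder.encode(key, StandardCharsets.UTF_8);
        System.out.println(byName.equals(byCharset)); // true
      }
    }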

+ 3 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java

@@ -26,6 +26,7 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Base64;
 import java.util.Iterator;
@@ -148,7 +149,7 @@ public class TextFileRegionAliasMap
     }
     OutputStream tmp = fs.create(file);
     java.io.Writer out = new BufferedWriter(new OutputStreamWriter(
-          (null == codec) ? tmp : codec.createOutputStream(tmp), "UTF-8"));
+          (null == codec) ? tmp : codec.createOutputStream(tmp), StandardCharsets.UTF_8));
     return new TextWriter(out, delim);
   }
 
@@ -379,7 +380,7 @@ public class TextFileRegionAliasMap
       FRIterator i = new FRIterator();
       try {
         BufferedReader r =
-            new BufferedReader(new InputStreamReader(createStream(), "UTF-8"));
+            new BufferedReader(new InputStreamReader(createStream(), StandardCharsets.UTF_8));
         iterators.put(i, r);
         i.pending = nextInternal(i);
       } catch (IOException e) {

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java

@@ -29,6 +29,7 @@ import java.io.InterruptedIOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayDeque;
 import java.util.Arrays;
 import java.util.Queue;
@@ -1063,7 +1064,7 @@ class BlockReceiver implements Closeable {
           // send a special ack upstream.
           if (datanode.isRestarting() && isClient && !isTransfer) {
             try (Writer out = new OutputStreamWriter(
-                replicaInfo.createRestartMetaStream(), "UTF-8")) {
+                replicaInfo.createRestartMetaStream(), StandardCharsets.UTF_8)) {
               // write out the current time.
               out.write(Long.toString(Time.now() + restartBudget));
               out.flush();

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java

@@ -28,6 +28,7 @@ import java.io.InputStream;
 import java.io.OutputStreamWriter;
 import java.io.RandomAccessFile;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -399,7 +400,7 @@ public class BlockPoolSlice {
     try {
       long used = getDfsUsed();
       try (Writer out = new OutputStreamWriter(
-          Files.newOutputStream(outFile.toPath()), "UTF-8")) {
+          Files.newOutputStream(outFile.toPath()), StandardCharsets.UTF_8)) {
         // mtime is written last, so that truncated writes won't be valid.
         out.write(Long.toString(used) + " " + Long.toString(timer.now()));
         // This is only called as part of the volume shutdown.

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java

@@ -27,6 +27,7 @@ import java.io.OutputStreamWriter;
 import java.io.RandomAccessFile;
 import java.net.URI;
 import java.nio.channels.ClosedChannelException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Paths;
 import java.nio.file.StandardCopyOption;
 import java.util.Collection;
@@ -929,7 +930,7 @@ public class FsVolumeImpl implements FsVolumeSpi {
       boolean success = false;
       try (BufferedWriter writer = new BufferedWriter(
           new OutputStreamWriter(fileIoProvider.getFileOutputStream(
-              FsVolumeImpl.this, getTempSaveFile()), "UTF-8"))) {
+              FsVolumeImpl.this, getTempSaveFile()), StandardCharsets.UTF_8))) {
         WRITER.writeValue(writer, state);
         success = true;
       } finally {

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java

@@ -66,6 +66,7 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.net.InetSocketAddress;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.text.DateFormat;
 import java.util.*;
 import java.util.concurrent.TimeUnit;
@@ -740,7 +741,7 @@ public class Mover {
     private static String[] readPathFile(String file) throws IOException {
       List<String> list = Lists.newArrayList();
       BufferedReader reader = new BufferedReader(
-          new InputStreamReader(new FileInputStream(file), "UTF-8"));
+          new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8));
       try {
         String line;
         while ((line = reader.readLine()) != null) {

+ 3 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java

@@ -26,6 +26,7 @@ import java.net.URI;
 import java.net.URL;
 import java.net.URLConnection;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.concurrent.TimeUnit;
 
@@ -207,7 +208,7 @@ public class DFSck extends Configured implements Tool {
       }
       InputStream stream = connection.getInputStream();
       BufferedReader input = new BufferedReader(new InputStreamReader(
-          stream, "UTF-8"));
+          stream, StandardCharsets.UTF_8));
       try {
         String line = null;
         while ((line = input.readLine()) != null) {
@@ -376,7 +377,7 @@ public class DFSck extends Configured implements Tool {
     }
     InputStream stream = connection.getInputStream();
     BufferedReader input = new BufferedReader(new InputStreamReader(
-                                              stream, "UTF-8"));
+                                              stream, StandardCharsets.UTF_8));
     String line = null;
     String lastLine = NamenodeFsck.CORRUPT_STATUS;
     int errCode = -1;

+ 7 - 7
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java

@@ -622,7 +622,7 @@ class OfflineImageReconstructor {
     inodeBld.setId(id);
     String name = node.removeChildStr(SECTION_NAME);
     if (name != null) {
-      inodeBld.setName(ByteString.copyFrom(name, "UTF8"));
+      inodeBld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
     }
     switch (type) {
     case "FILE":
@@ -838,7 +838,7 @@ class OfflineImageReconstructor {
     }
     String target = node.removeChildStr(INODE_SECTION_TARGET);
     if (target != null) {
-      bld.setTarget(ByteString.copyFrom(target, "UTF8"));
+      bld.setTarget(ByteString.copyFrom(target, StandardCharsets.UTF_8));
     }
     Long lval = node.removeChildLong(INODE_SECTION_MTIME);
     if (lval != null) {
@@ -900,7 +900,7 @@ class OfflineImageReconstructor {
         }
         val = new HexBinaryAdapter().unmarshal(valHex);
       } else {
-        val = valStr.getBytes("UTF8");
+        val = valStr.getBytes(StandardCharsets.UTF_8);
       }
       b.setValue(ByteString.copyFrom(val));
 
@@ -1232,7 +1232,7 @@ class OfflineImageReconstructor {
         }
         String name = inodeRef.removeChildStr("name");
         if (name != null) {
-          bld.setName(ByteString.copyFrom(name, "UTF8"));
+          bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
         }
         Integer dstSnapshotId = inodeRef.removeChildInt(
             INODE_REFERENCE_SECTION_DST_SNAPSHOT_ID);
@@ -1468,7 +1468,7 @@ class OfflineImageReconstructor {
         bld.setChildrenSize(childrenSize);
         String name = dirDiff.removeChildStr(SECTION_NAME);
         if (name != null) {
-          bld.setName(ByteString.copyFrom(name, "UTF8"));
+          bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
         }
         Node snapshotCopy = dirDiff.removeChild(
             SNAPSHOT_DIFF_SECTION_SNAPSHOT_COPY);
@@ -1514,7 +1514,7 @@ class OfflineImageReconstructor {
           }
           created.verifyNoRemainingKeys("created");
           FsImageProto.SnapshotDiffSection.CreatedListEntry.newBuilder().
-              setName(ByteString.copyFrom(cleName, "UTF8")).
+              setName(ByteString.copyFrom(cleName, StandardCharsets.UTF_8)).
               build().writeDelimitedTo(out);
           actualCreatedListSize++;
         }
@@ -1571,7 +1571,7 @@ class OfflineImageReconstructor {
         }
         String name = fileDiff.removeChildStr(SECTION_NAME);
         if (name != null) {
-          bld.setName(ByteString.copyFrom(name, "UTF8"));
+          bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
         }
         Node snapshotCopy = fileDiff.removeChild(
             SNAPSHOT_DIFF_SECTION_SNAPSHOT_COPY);

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.security.Permission;
 import java.security.PrivilegedExceptionAction;
 import java.text.SimpleDateFormat;
@@ -1917,7 +1918,7 @@ public class TestDFSShell {
     char c = content.charAt(0);
     sb.setCharAt(0, ++c);
     for(MaterializedReplica replica : replicas) {
-      replica.corruptData(sb.toString().getBytes("UTF8"));
+      replica.corruptData(sb.toString().getBytes(StandardCharsets.UTF_8));
     }
   }
 

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java

@@ -45,6 +45,7 @@ import java.net.InetSocketAddress;
 import java.net.ServerSocket;
 import java.net.SocketTimeoutException;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.security.NoSuchAlgorithmException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
@@ -1885,7 +1886,7 @@ public class TestDistributedFileSystem {
           .replication((short) 1)
           .blockSize(4096)
           .build()) {
-        byte[] contentOrigin = content.getBytes("UTF8");
+        byte[] contentOrigin = content.getBytes(StandardCharsets.UTF_8);
         out1.write(contentOrigin);
       }
 

+ 4 - 4
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java

@@ -194,10 +194,10 @@ public class UpgradeUtilities {
    */
   public static Configuration initializeStorageStateConf(int numDirs,
                                                          Configuration conf) {
-    StringBuffer nameNodeDirs =
-      new StringBuffer(new File(TEST_ROOT_DIR, "name1").toString());
-    StringBuffer dataNodeDirs =
-      new StringBuffer(new File(TEST_ROOT_DIR, "data1").toString());
+    StringBuilder nameNodeDirs =
+        new StringBuilder(new File(TEST_ROOT_DIR, "name1").toString());
+    StringBuilder dataNodeDirs =
+        new StringBuilder(new File(TEST_ROOT_DIR, "data1").toString());
     for (int i = 2; i <= numDirs; i++) {
       nameNodeDirs.append("," + new File(TEST_ROOT_DIR, "name"+i));
       dataNodeDirs.append("," + new File(TEST_ROOT_DIR, "data"+i));
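
This hunk, like the later ones in RMCommunicator, JobInfo, HadoopArchives, ExecutionSummarizer, and StreamUtil, swaps StringBuffer for StringBuilder. The two classes share the same API, but StringBuffer synchronizes every call; for a method-local accumulator that never escapes its thread, StringBuilder is the lock-free drop-in replacement. A minimal sketch of the pattern:

    public class BuilderSketch {
      public static void main(String[] args) {
        // Method-local and single-threaded, so no synchronization is needed.
        StringBuilder dirs = new StringBuilder("name1");
        for (int i = 2; i <= 3; i++) {
          dirs.append(",").append("name").append(i);
        }
        System.out.println(dirs); // name1,name2,name3
      }
    }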

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java

@@ -33,6 +33,7 @@ import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -147,7 +148,7 @@ public class TestProvidedImpl {
                 newFile.getAbsolutePath());
             newFile.createNewFile();
             Writer writer = new OutputStreamWriter(
-                new FileOutputStream(newFile.getAbsolutePath()), "utf-8");
+                new FileOutputStream(newFile.getAbsolutePath()), StandardCharsets.UTF_8);
             for(int i=0; i< BLK_LEN/(Integer.SIZE/8); i++) {
               writer.write(currentCount);
             }

+ 4 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSPermissionChecker.java

@@ -39,6 +39,7 @@ import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.function.LongFunction;
 
@@ -433,7 +434,7 @@ public class TestFSPermissionChecker {
     PermissionStatus permStatus = PermissionStatus.createImmutable(owner, group,
       FsPermission.createImmutable(perm));
     INodeDirectory inodeDirectory = new INodeDirectory(
-      HdfsConstants.GRANDFATHER_INODE_ID, name.getBytes("UTF-8"), permStatus, 0L);
+        HdfsConstants.GRANDFATHER_INODE_ID, name.getBytes(StandardCharsets.UTF_8), permStatus, 0L);
     parent.addChild(inodeDirectory);
     return inodeDirectory;
   }
@@ -443,8 +444,8 @@ public class TestFSPermissionChecker {
     PermissionStatus permStatus = PermissionStatus.createImmutable(owner, group,
       FsPermission.createImmutable(perm));
     INodeFile inodeFile = new INodeFile(HdfsConstants.GRANDFATHER_INODE_ID,
-      name.getBytes("UTF-8"), permStatus, 0L, 0L, null, REPLICATION,
-      PREFERRED_BLOCK_SIZE);
+        name.getBytes(StandardCharsets.UTF_8), permStatus, 0L, 0L, null,
+        REPLICATION, PREFERRED_BLOCK_SIZE);
     parent.addChild(inodeFile);
     return inodeFile;
   }

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java

@@ -316,7 +316,7 @@ public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest {
     String content = "testLengthParamLongerThanFile";
     FSDataOutputStream testFileOut = webhdfs.create(testFile);
     try {
-      testFileOut.write(content.getBytes("US-ASCII"));
+      testFileOut.write(content.getBytes(StandardCharsets.US_ASCII));
     } finally {
       IOUtils.closeStream(testFileOut);
     }
@@ -366,7 +366,7 @@ public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest {
     String content = "testOffsetPlusLengthParamsLongerThanFile";
     FSDataOutputStream testFileOut = webhdfs.create(testFile);
     try {
-      testFileOut.write(content.getBytes("US-ASCII"));
+      testFileOut.write(content.getBytes(StandardCharsets.US_ASCII));
     } finally {
       IOUtils.closeStream(testFileOut);
     }

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java

@@ -210,7 +210,7 @@ public abstract class RMCommunicator extends AbstractService
         || jobImpl.getInternalState() == JobStateInternal.ERROR) {
       finishState = FinalApplicationStatus.FAILED;
     }
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     for (String s : job.getDiagnostics()) {
       sb.append(s).append("\n");
     }

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java

@@ -120,7 +120,7 @@ public class JobInfo {
 
       List<String> diagnostics = job.getDiagnostics();
       if (diagnostics != null && !diagnostics.isEmpty()) {
-        StringBuffer b = new StringBuffer();
+        StringBuilder b = new StringBuilder();
         for (String diag : diagnostics) {
           b.append(diag);
         }

+ 13 - 12
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobResourceUploaderWithSharedCache.java

@@ -32,6 +32,7 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.jar.JarOutputStream;
 import java.util.zip.ZipEntry;
@@ -339,12 +340,12 @@ public class TestJobResourceUploaderWithSharedCache {
       IOException {
     FileOutputStream fos =
         new FileOutputStream(new File(p.toUri().getPath()));
-    JarOutputStream jos = new JarOutputStream(fos);
-    ZipEntry ze = new ZipEntry("distributed.jar.inside" + index);
-    jos.putNextEntry(ze);
-    jos.write(("inside the jar!" + index).getBytes());
-    jos.closeEntry();
-    jos.close();
+    try (JarOutputStream jos = new JarOutputStream(fos)) {
+      ZipEntry ze = new ZipEntry("distributed.jar.inside" + index);
+      jos.putNextEntry(ze);
+      jos.write(("inside the jar!" + index).getBytes());
+      jos.closeEntry();
+    }
     localFs.setPermission(p, new FsPermission("700"));
     return p;
   }
@@ -354,12 +355,12 @@ public class TestJobResourceUploaderWithSharedCache {
     Path archive = new Path(testRootDir, archiveFile);
     Path file = new Path(testRootDir, filename);
     DataOutputStream out = localFs.create(archive);
-    ZipOutputStream zos = new ZipOutputStream(out);
-    ZipEntry ze = new ZipEntry(file.toString());
-    zos.putNextEntry(ze);
-    zos.write(input.getBytes("UTF-8"));
-    zos.closeEntry();
-    zos.close();
+    try (ZipOutputStream zos = new ZipOutputStream(out)) {
+      ZipEntry ze = new ZipEntry(file.toString());
+      zos.putNextEntry(ze);
+      zos.write(input.getBytes(StandardCharsets.UTF_8));
+      zos.closeEntry();
+    }
     return archive;
   }
 }
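
Beyond the charset change, both hunks above fold the manual close() calls into try-with-resources, so the archive stream is closed even when putNextEntry or write throws. A standalone sketch of the shape (demo.jar is a hypothetical file name):

    import java.io.FileOutputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.jar.JarOutputStream;
    import java.util.zip.ZipEntry;

    public class JarWriteSketch {
      public static void main(String[] args) throws Exception {
        // try-with-resources closes jos (and the wrapped FileOutputStream)
        // on every exit path, including exceptions.
        try (JarOutputStream jos =
                 new JarOutputStream(new FileOutputStream("demo.jar"))) {
          ZipEntry ze = new ZipEntry("distributed.jar.inside0");
          jos.putNextEntry(ze);
          jos.write("inside the jar!0".getBytes(StandardCharsets.UTF_8));
          jos.closeEntry();
        }
      }
    }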

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java

@@ -92,7 +92,7 @@ public class TestConcatenatedCompressedInput {
 
   private static LineReader makeStream(String str) throws IOException {
     return new LineReader(new ByteArrayInputStream(
-            str.getBytes("UTF-8")), defaultConf);
+            str.getBytes(StandardCharsets.UTF_8)), defaultConf);
   }
 
   private static void writeFile(FileSystem fs, Path name,

+ 2 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java

@@ -26,6 +26,7 @@ import org.apache.hadoop.mapreduce.lib.fieldsel.TestMRFieldSelection;
 import org.junit.Test;
 import static org.junit.Assert.assertEquals;
 
+import java.nio.charset.StandardCharsets;
 import java.text.NumberFormat;
 
 public class TestFieldSelection {
@@ -60,7 +61,7 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
     TestMRFieldSelection.constructInputOutputData(inputData,
       expectedOutput, numOfInputLines);
     FSDataOutputStream fileOut = fs.create(new Path(INPUT_DIR, inputFile));
-    fileOut.write(inputData.toString().getBytes("utf-8"));
+    fileOut.write(inputData.toString().getBytes(StandardCharsets.UTF_8));
     fileOut.close();
 
     System.out.println("inputData:");

+ 2 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java

@@ -30,6 +30,7 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 
@@ -134,9 +135,7 @@ public class TestKeyValueTextInputFormat {
     }
   }
   private LineReader makeStream(String str) throws IOException {
-    return new LineReader(new ByteArrayInputStream
-                                           (str.getBytes("UTF-8")), 
-                                           defaultConf);
+    return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), defaultConf);
   }
   @Test
   public void testUTF8() throws Exception {

+ 3 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java

@@ -42,6 +42,7 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.*;
 
 public class TestTextInputFormat {
@@ -330,14 +331,10 @@ public class TestTextInputFormat {
   }
 
   private static LineReader makeStream(String str) throws IOException {
-    return new LineReader(new ByteArrayInputStream
-                                             (str.getBytes("UTF-8")), 
-                                           defaultConf);
+    return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), defaultConf);
   }
   private static LineReader makeStream(String str, int bufsz) throws IOException {
-    return new LineReader(new ByteArrayInputStream
-                                             (str.getBytes("UTF-8")), 
-                                           bufsz);
+    return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), bufsz);
   }
 
   @Test (timeout=5000)

+ 4 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java

@@ -26,6 +26,7 @@ import org.junit.Test;
 import static org.junit.Assert.assertEquals;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.*;
 import java.text.NumberFormat;
 
@@ -55,8 +56,8 @@ public class TestAggregates {
     fs.mkdirs(INPUT_DIR);
     fs.delete(OUTPUT_DIR, true);
 
-    StringBuffer inputData = new StringBuffer();
-    StringBuffer expectedOutput = new StringBuffer();
+    StringBuilder inputData = new StringBuilder();
+    StringBuilder expectedOutput = new StringBuilder();
     expectedOutput.append("max\t19\n");
     expectedOutput.append("min\t1\n"); 
 
@@ -76,7 +77,7 @@ public class TestAggregates {
     expectedOutput.append("uniq_count\t15\n");
 
 
-    fileOut.write(inputData.toString().getBytes("utf-8"));
+    fileOut.write(inputData.toString().getBytes(StandardCharsets.UTF_8));
     fileOut.close();
 
     System.out.println("inputData:");

+ 4 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java

@@ -30,6 +30,7 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
 import org.junit.Test;
 
+import java.nio.charset.StandardCharsets;
 import java.text.NumberFormat;
 
 import static org.junit.Assert.assertEquals;
@@ -61,8 +62,8 @@ public class TestMapReduceAggregates {
     fs.mkdirs(INPUT_DIR);
     fs.delete(OUTPUT_DIR, true);
 
-    StringBuffer inputData = new StringBuffer();
-    StringBuffer expectedOutput = new StringBuffer();
+    StringBuilder inputData = new StringBuilder();
+    StringBuilder expectedOutput = new StringBuilder();
     expectedOutput.append("max\t19\n");
     expectedOutput.append("min\t1\n"); 
 
@@ -82,7 +83,7 @@ public class TestMapReduceAggregates {
     expectedOutput.append("uniq_count\t15\n");
 
 
-    fileOut.write(inputData.toString().getBytes("utf-8"));
+    fileOut.write(inputData.toString().getBytes(StandardCharsets.UTF_8));
     fileOut.close();
 
     System.out.println("inputData:");

+ 2 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRKeyValueTextInputFormat.java

@@ -45,6 +45,7 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.*;
 
 public class TestMRKeyValueTextInputFormat {
@@ -253,9 +254,7 @@ public class TestMRKeyValueTextInputFormat {
   }
 
   private LineReader makeStream(String str) throws IOException {
-    return new LineReader(new ByteArrayInputStream
-                                           (str.getBytes("UTF-8")), 
-                                           defaultConf);
+    return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), defaultConf);
   }
   
   @Test

+ 2 - 1
hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java

@@ -14,6 +14,7 @@
 
 package org.apache.hadoop.maven.plugin.cmakebuilder;
 
+import java.nio.charset.StandardCharsets;
 import java.util.Locale;
 import org.apache.hadoop.maven.plugin.util.Exec;
 import org.apache.maven.execution.MavenSession;
@@ -165,7 +166,7 @@ public class TestMojo extends AbstractMojo {
                 testName + ".pstatus"));
     BufferedWriter out = null;
     try {
-      out = new BufferedWriter(new OutputStreamWriter(fos, "UTF8"));
+      out = new BufferedWriter(new OutputStreamWriter(fos, StandardCharsets.UTF_8));
       out.write(status + "\n");
     } finally {
       if (out != null) {

+ 1 - 1
hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java

@@ -691,7 +691,7 @@ public class HadoopArchives implements Tool {
       if (value.isDir()) { 
         towrite = encodeName(relPath.toString())
                   + " dir " + propStr + " 0 0 ";
-        StringBuffer sbuff = new StringBuffer();
+        StringBuilder sbuff = new StringBuilder();
         sbuff.append(towrite);
         for (String child: value.children) {
           sbuff.append(encodeName(child) + " ");

+ 2 - 6
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java

@@ -24,6 +24,7 @@ import java.io.UnsupportedEncodingException;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -98,12 +99,7 @@ public class SharedKeyCredentials {
   }
 
   private String computeHmac256(final String stringToSign) {
-    byte[] utf8Bytes;
-    try {
-      utf8Bytes = stringToSign.getBytes(AbfsHttpConstants.UTF_8);
-    } catch (final UnsupportedEncodingException e) {
-      throw new IllegalArgumentException(e);
-    }
+    byte[] utf8Bytes = stringToSign.getBytes(StandardCharsets.UTF_8);
     byte[] hmac;
     synchronized (this) {
       hmac = hmacSha256.doFinal(utf8Bytes);

+ 1 - 1
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java

@@ -390,7 +390,7 @@ public class ITestAzureBlobFileSystemDelegationSAS extends AbstractAbfsIntegrati
     fs.create(reqPath).close();
 
     final String propertyName = "user.mime_type";
-    final byte[] propertyValue = "text/plain".getBytes("utf-8");
+    final byte[] propertyValue = "text/plain".getBytes(StandardCharsets.UTF_8);
     fs.setXAttr(reqPath, propertyName, propertyValue);
 
     assertArrayEquals(propertyValue, fs.getXAttr(reqPath, propertyName));

+ 2 - 1
hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/ITestProvidedImplementation.java

@@ -28,6 +28,7 @@ import java.net.URI;
 import java.nio.ByteBuffer;
 import java.nio.channels.Channels;
 import java.nio.channels.ReadableByteChannel;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -180,7 +181,7 @@ public class ITestProvidedImplementation {
           LOG.info("Creating " + newFile.toString());
           newFile.createNewFile();
           Writer writer = new OutputStreamWriter(
-              new FileOutputStream(newFile.getAbsolutePath()), "utf-8");
+              new FileOutputStream(newFile.getAbsolutePath()), StandardCharsets.UTF_8);
           for(int j=0; j < baseFileLen*i; j++) {
             writer.write("0");
           }

+ 1 - 1
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java

@@ -222,7 +222,7 @@ class ExecutionSummarizer implements StatListener<JobStats> {
   // Gets the stringified version of DataStatistics
   static String stringifyDataStatistics(DataStatistics stats) {
     if (stats != null) {
-      StringBuffer buffer = new StringBuffer();
+      StringBuilder buffer = new StringBuilder();
       String compressionStatus = stats.isDataCompressed() 
                                  ? "Compressed" 
                                  : "Uncompressed";

+ 1 - 1
hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamUtil.java

@@ -128,7 +128,7 @@ public class StreamUtil {
   static final String regexpSpecials = "[]()?*+|.!^-\\~@";
 
   public static String regexpEscape(String plain) {
-    StringBuffer buf = new StringBuffer();
+    StringBuilder buf = new StringBuilder();
     char[] ch = plain.toCharArray();
     int csup = ch.length;
     for (int c = 0; c < csup; c++) {

+ 2 - 5
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamDataProtocol.java

@@ -19,11 +19,8 @@
 package org.apache.hadoop.streaming;
 
 import java.io.*;
-import java.util.*;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
+import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner;
 
 import org.junit.Test;
@@ -59,7 +56,7 @@ public class TestStreamDataProtocol
   {
     DataOutputStream out = new DataOutputStream(
                                                 new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
-    out.write(input.getBytes("UTF-8"));
+    out.write(input.getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 

+ 2 - 5
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java

@@ -19,11 +19,8 @@
 package org.apache.hadoop.streaming;
 
 import java.io.*;
-import java.util.*;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
+import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
 
 import static org.junit.Assert.*;
 import org.junit.Test;
@@ -56,7 +53,7 @@ public class TestStreamReduceNone
   {
     DataOutputStream out = new DataOutputStream(
                                                 new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
-    out.write(input.getBytes("UTF-8"));
+    out.write(input.getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 

+ 4 - 3
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.streaming;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
 /**
  * This class tests StreamXmlRecordReader
@@ -44,9 +45,9 @@ public class TestStreamXmlRecordReader extends TestStreaming {
     FileOutputStream out = new FileOutputStream(INPUT_FILE.getAbsoluteFile());
     String dummyXmlStartTag = "<PATTERN>\n";
     String dummyXmlEndTag = "</PATTERN>\n";
-    out.write(dummyXmlStartTag.getBytes("UTF-8"));
-    out.write(input.getBytes("UTF-8"));
-    out.write(dummyXmlEndTag.getBytes("UTF-8"));
+    out.write(dummyXmlStartTag.getBytes(StandardCharsets.UTF_8));
+    out.write(input.getBytes(StandardCharsets.UTF_8));
+    out.write(dummyXmlEndTag.getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 

+ 2 - 1
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreaming.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.streaming;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
@@ -120,7 +121,7 @@ public class TestStreaming
   {
     DataOutputStream out = getFileSystem().create(new Path(
       INPUT_FILE.getPath()));
-    out.write(getInputData().getBytes("UTF-8"));
+    out.write(getInputData().getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 

+ 2 - 1
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingKeyValue.java

@@ -22,6 +22,7 @@ import org.junit.Test;
 import static org.junit.Assert.*;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -68,7 +69,7 @@ public class TestStreamingKeyValue
   {
     DataOutputStream out = new DataOutputStream(
        new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
-    out.write(input.getBytes("UTF-8"));
+    out.write(input.getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 

+ 2 - 5
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingSeparator.java

@@ -22,11 +22,8 @@ import org.junit.Test;
 import static org.junit.Assert.*;
 
 import java.io.*;
-import java.util.*;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
+import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
 
 /**
  * This class tests hadoopStreaming with customized separator in MapReduce local mode.
@@ -64,7 +61,7 @@ public class TestStreamingSeparator
   {
     DataOutputStream out = new DataOutputStream(
                                                 new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
-    out.write(input.getBytes("UTF-8"));
+    out.write(input.getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 

+ 2 - 1
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestTypedBytesStreaming.java

@@ -22,6 +22,7 @@ import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
@@ -48,7 +49,7 @@ public class TestTypedBytesStreaming {
 
   protected void createInput() throws IOException {
     DataOutputStream out = new DataOutputStream(new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
-    out.write(input.getBytes("UTF-8"));
+    out.write(input.getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 

+ 3 - 3
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java

@@ -101,9 +101,9 @@ public class TestStreamXmlRecordReader {
     FileOutputStream out = new FileOutputStream(INPUT_FILE.getAbsoluteFile());
     String dummyXmlStartTag = "<PATTERN>\n";
     String dummyXmlEndTag = "</PATTERN>\n";
-    out.write(dummyXmlStartTag.getBytes("UTF-8"));
-    out.write(input.getBytes("UTF-8"));
-    out.write(dummyXmlEndTag.getBytes("UTF-8"));
+    out.write(dummyXmlStartTag.getBytes(StandardCharsets.UTF_8));
+    out.write(input.getBytes(StandardCharsets.UTF_8));
+    out.write(dummyXmlEndTag.getBytes(StandardCharsets.UTF_8));
     out.close();
   }
 

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestTopCLI.java

@@ -25,6 +25,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -157,7 +158,7 @@ public class TestTopCLI {
       System.setErr(out);
       topcli.showTopScreen();
       out.flush();
-      actual = outStream.toString("UTF-8");
+      actual = outStream.toString(StandardCharsets.UTF_8.name());
     }
 
     String expected = "NodeManager(s)"
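
Here the patch passes StandardCharsets.UTF_8.name() rather than the Charset itself because, on Java 8, ByteArrayOutputStream.toString(String) is the only charset-aware overload; the Charset-typed toString only arrived in Java 10. A minimal sketch of both forms:

    import java.io.ByteArrayOutputStream;
    import java.nio.charset.StandardCharsets;

    public class BaosToStringSketch {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream outStream = new ByteArrayOutputStream();
        outStream.write("NodeManager(s)".getBytes(StandardCharsets.UTF_8));
        // Java 8: lookup by name, declares checked UnsupportedEncodingException.
        String viaName = outStream.toString(StandardCharsets.UTF_8.name());
        // Java 10+: Charset overload, no checked exception.
        String viaCharset = outStream.toString(StandardCharsets.UTF_8);
        System.out.println(viaName.equals(viaCharset)); // true
      }
    }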

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java

@@ -38,6 +38,7 @@ import java.io.OutputStreamWriter;
 import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.text.DecimalFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -364,7 +365,7 @@ public class TestYarnCLI {
     verify(client).getContainers(attemptId);
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     OutputStreamWriter stream =
-        new OutputStreamWriter(baos, "UTF-8");
+        new OutputStreamWriter(baos, StandardCharsets.UTF_8);
     PrintWriter pw = new PrintWriter(stream);
     pw.println("Total number of containers :3");
     pw.printf(ApplicationCLI.CONTAINER_PATTERN, "Container-Id", "Start Time",

+ 3 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java

@@ -30,6 +30,7 @@ import java.io.StringWriter;
 import java.io.UnsupportedEncodingException;
 import java.io.Writer;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.nio.file.StandardOpenOption;
@@ -159,7 +160,7 @@ public class TestAggregatedLogFormat {
 
     File outputFile = new File(new File(srcFilePath.toString()), fileName);
     FileOutputStream os = new FileOutputStream(outputFile);
-    final OutputStreamWriter osw = new OutputStreamWriter(os, "UTF8");
+    final OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8);
     final int ch = filler;
 
     UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
@@ -473,7 +474,7 @@ public class TestAggregatedLogFormat {
     }
     File outputFile = new File(new File(srcFilePath.toString()), fileName);
     FileOutputStream os = new FileOutputStream(outputFile);
-    OutputStreamWriter osw = new OutputStreamWriter(os, "UTF8");
+    OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8);
     return osw;
   }
 }

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java

@@ -24,6 +24,7 @@ import java.io.FileInputStream;
 import java.io.InputStream;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -227,6 +228,6 @@ public class TestApplicationHistoryServer {
     while ((read = input.read(buffer)) >= 0) {
       data.write(buffer, 0, read);
     }
-    return new String(data.toByteArray(), "UTF-8");
+    return new String(data.toByteArray(), StandardCharsets.UTF_8);
   }
 }

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.yarn.server.nodemanager.amrmproxy;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -585,7 +586,7 @@ public class FederationInterceptor extends AbstractRequestInterceptor {
         // entry for subClusterId -> UAM AMRMTokenIdentifier
         String scId = key.substring(NMSS_SECONDARY_SC_PREFIX.length());
         Token<AMRMTokenIdentifier> aMRMTokenIdentifier = new Token<>();
-        aMRMTokenIdentifier.decodeFromUrlString(new String(value, STRING_TO_BYTE_FORMAT));
+        aMRMTokenIdentifier.decodeFromUrlString(new String(value, StandardCharsets.UTF_8));
         uamMap.put(scId, aMRMTokenIdentifier);
         LOG.debug("Recovered UAM in {} from NMSS.", scId);
       }
@@ -1345,7 +1346,7 @@ public class FederationInterceptor extends AbstractRequestInterceptor {
           } else if (getNMStateStore() != null) {
             getNMStateStore().storeAMRMProxyAppContextEntry(attemptId,
                 NMSS_SECONDARY_SC_PREFIX + subClusterId,
-                token.encodeToUrlString().getBytes(STRING_TO_BYTE_FORMAT));
+                token.encodeToUrlString().getBytes(StandardCharsets.UTF_8));
           }
         } catch (Throwable e) {
           LOG.error("Failed to persist UAM token from {} Application {}",
@@ -1884,7 +1885,7 @@ public class FederationInterceptor extends AbstractRequestInterceptor {
           try {
             getNMStateStore().storeAMRMProxyAppContextEntry(attemptId,
                 NMSS_SECONDARY_SC_PREFIX + subClusterId.getId(),
-                newToken.encodeToUrlString().getBytes(STRING_TO_BYTE_FORMAT));
+                newToken.encodeToUrlString().getBytes(StandardCharsets.UTF_8));
           } catch (IOException e) {
             LOG.error("Error storing UAM token as AMRMProxy "
                 + "context entry in NMSS for {}.", attemptId, e);

+ 3 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsBlkioResourceHandlerImpl.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileg
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.ArrayList;
@@ -73,7 +74,7 @@ public class CGroupsBlkioResourceHandlerImpl implements DiskResourceHandler {
     // are using the CFQ scheduler. If they aren't print a warning
     try {
       byte[] contents = Files.readAllBytes(Paths.get(PARTITIONS_FILE));
-      data = new String(contents, "UTF-8").trim();
+      data = new String(contents, StandardCharsets.UTF_8).trim();
     } catch (IOException e) {
       String msg = "Couldn't read " + PARTITIONS_FILE +
           "; can't determine disk scheduler type";
@@ -96,7 +97,7 @@ public class CGroupsBlkioResourceHandlerImpl implements DiskResourceHandler {
             if (schedulerFile.exists()) {
               try {
                 byte[] contents = Files.readAllBytes(Paths.get(schedulerPath));
-                String schedulerString = new String(contents, "UTF-8").trim();
+                String schedulerString = new String(contents, StandardCharsets.UTF_8).trim();
                 if (!schedulerString.contains("[cfq]")) {
                   LOG.warn("Device " + partition + " does not use the CFQ"
                       + " scheduler; disk isolation using "

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsCpuResourceHandlerImpl.java

@@ -37,6 +37,7 @@ import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -132,7 +133,7 @@ public class CGroupsCpuResourceHandlerImpl implements CpuResourceHandler {
     File quotaFile = new File(path,
         CPU.getName() + "." + CGroupsHandler.CGROUP_CPU_QUOTA_US);
     if (quotaFile.exists()) {
-      String contents = FileUtils.readFileToString(quotaFile, "UTF-8");
+      String contents = FileUtils.readFileToString(quotaFile, StandardCharsets.UTF_8);
       int quotaUS = Integer.parseInt(contents.trim());
       if (quotaUS != -1) {
         return true;
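
Note: commons-io's FileUtils.readFileToString has both (File, String) and (File, Charset) overloads; the Charset overload used here avoids resolving the charset by name on each call. Sketch under an assumed cgroup v1 mount path (the handler derives the real path from its configuration):

    import java.io.File;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    import org.apache.commons.io.FileUtils;

    public class ReadCpuQuota {
      public static void main(String[] args) throws IOException {
        // Illustrative path; a quota of -1 means "unlimited" in cgroup v1.
        File quotaFile = new File("/sys/fs/cgroup/cpu/cpu.cfs_quota_us");
        String contents = FileUtils.readFileToString(quotaFile, StandardCharsets.UTF_8);
        System.out.println(Integer.parseInt(contents.trim()));
      }
    }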

+ 5 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsHandlerImpl.java

@@ -39,6 +39,7 @@ import org.apache.hadoop.yarn.util.Clock;
 import org.apache.hadoop.yarn.util.SystemClock;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.Arrays;
@@ -216,7 +217,7 @@ class CGroupsHandlerImpl implements CGroupsHandler {
 
     try {
       FileInputStream fis = new FileInputStream(new File(mtab));
-      in = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
+      in = new BufferedReader(new InputStreamReader(fis, StandardCharsets.UTF_8));
 
       for (String str = in.readLine(); str != null;
            str = in.readLine()) {
@@ -474,7 +475,7 @@ class CGroupsHandlerImpl implements CGroupsHandler {
     if (LOG.isDebugEnabled()) {
       try (BufferedReader inl =
           new BufferedReader(new InputStreamReader(new FileInputStream(cgf
-              + "/tasks"), "UTF-8"))) {
+              + "/tasks"), StandardCharsets.UTF_8))) {
         str = inl.readLine();
         if (str != null) {
           LOG.debug("First line in cgroup tasks file: {} {}", cgf, str);
@@ -559,7 +560,7 @@ class CGroupsHandlerImpl implements CGroupsHandler {
 
     try {
       File file = new File(cGroupParamPath);
-      Writer w = new OutputStreamWriter(new FileOutputStream(file), "UTF-8");
+      Writer w = new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8);
       pw = new PrintWriter(w);
       pw.write(value);
     } catch (IOException e) {
@@ -595,7 +596,7 @@ class CGroupsHandlerImpl implements CGroupsHandler {
 
     try {
       byte[] contents = Files.readAllBytes(Paths.get(cGroupParamPath));
-      return new String(contents, "UTF-8").trim();
+      return new String(contents, StandardCharsets.UTF_8).trim();
     } catch (IOException e) {
       throw new ResourceHandlerException(
           "Unable to read from " + cGroupParamPath);

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java

@@ -31,6 +31,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.HashMap;
@@ -619,9 +620,9 @@ import java.util.regex.Pattern;
             File(tmpDirPath));
 
         try (
-            Writer writer = new OutputStreamWriter(new FileOutputStream(tcCmds),
-                "UTF-8");
-            PrintWriter printWriter = new PrintWriter(writer)) {
+            Writer writer = new OutputStreamWriter(new FileOutputStream(tcCmds),
+                StandardCharsets.UTF_8);
+            PrintWriter printWriter = new PrintWriter(writer)) {
           for (String command : commands) {
             printWriter.println(command);
           }

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DefaultLinuxContainerRuntime.java

@@ -48,6 +48,7 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -257,9 +258,9 @@ public class DefaultLinuxContainerRuntime implements LinuxContainerRuntime {
       File commandFile = File.createTempFile(TMP_FILE_PREFIX + filePrefix,
           TMP_FILE_SUFFIX, cmdDir);
       try (
-          Writer writer = new OutputStreamWriter(
-              new FileOutputStream(commandFile.toString()), "UTF-8");
-          PrintWriter printWriter = new PrintWriter(writer);
+          Writer writer = new OutputStreamWriter(
+              new FileOutputStream(commandFile.toString()), StandardCharsets.UTF_8);
+          PrintWriter printWriter = new PrintWriter(writer);
       ) {
         Map<String, List<String>> cmd = new HashMap<String, List<String>>();
         // command = exec

+ 5 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/DockerClient.java

@@ -38,6 +38,7 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
 
@@ -55,9 +56,9 @@ public final class DockerClient {
     File dockerEnvFile = File.createTempFile(TMP_FILE_PREFIX + filePrefix,
         TMP_ENV_FILE_SUFFIX, cmdDir);
     try (
-        Writer envWriter = new OutputStreamWriter(
-            new FileOutputStream(dockerEnvFile), "UTF-8");
-        PrintWriter envPrintWriter = new PrintWriter(envWriter);
+        Writer envWriter = new OutputStreamWriter(
+            new FileOutputStream(dockerEnvFile), StandardCharsets.UTF_8);
+        PrintWriter envPrintWriter = new PrintWriter(envWriter);
     ) {
       for (Map.Entry<String, String> entry : cmd.getEnv()
           .entrySet()) {
@@ -94,7 +95,7 @@ public final class DockerClient {
           TMP_FILE_SUFFIX, cmdDir);
       try (
         Writer writer = new OutputStreamWriter(
-            new FileOutputStream(dockerCommandFile.toString()), "UTF-8");
+            new FileOutputStream(dockerCommandFile.toString()), StandardCharsets.UTF_8);
         PrintWriter printWriter = new PrintWriter(writer);
       ) {
         printWriter.println("[docker-command-execution]");

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/gpu/NvidiaDockerV1CommandPlugin.java

@@ -38,6 +38,7 @@ import java.io.Serializable;
 import java.io.StringWriter;
 import java.net.URL;
 import java.net.URLConnection;
+import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -104,7 +105,7 @@ public class NvidiaDockerV1CommandPlugin implements DockerCommandPlugin {
       uc.setRequestProperty("X-Requested-With", "Curl");
 
       StringWriter writer = new StringWriter();
-      IOUtils.copy(uc.getInputStream(), writer, "utf-8");
+      IOUtils.copy(uc.getInputStream(), writer, StandardCharsets.UTF_8);
       cliOptions = writer.toString();
 
       LOG.info("Additional docker CLI options from plugin to run GPU "

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java

@@ -27,6 +27,7 @@ import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -220,7 +221,7 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
     PrintWriter pw = null;
     try {
       File file = new File(path + "/" + param);
-      Writer w = new OutputStreamWriter(new FileOutputStream(file), "UTF-8");
+      Writer w = new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8);
       pw = new PrintWriter(w);
       pw.write(value);
     } catch (IOException e) {
@@ -249,7 +250,7 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
     if (LOG.isDebugEnabled()) {
       try (BufferedReader inl =
             new BufferedReader(new InputStreamReader(new FileInputStream(cgf
-              + "/tasks"), "UTF-8"))) {
+              + "/tasks"), StandardCharsets.UTF_8))) {
         str = inl.readLine();
         if (str != null) {
           LOG.debug("First line in cgroup tasks file: {} {}", cgf, str);
@@ -403,7 +404,7 @@ public class CgroupsLCEResourcesHandler implements LCEResourcesHandler {
 
     try {
       FileInputStream fis = new FileInputStream(new File(getMtabFileName()));
-      in = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
+      in = new BufferedReader(new InputStreamReader(fis, StandardCharsets.UTF_8));
 
       for (String str = in.readLine(); str != null;
           str = in.readLine()) {

+ 3 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java

@@ -22,13 +22,14 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.util.ConverterUtils;
 
 /**
  * Helper functionality to read the pid from a file.
@@ -57,7 +58,7 @@ public class ProcessIdFileReader {
       File file = new File(path.toString());
       if (file.exists()) {
         FileInputStream fis = new FileInputStream(file);
-        bufReader = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
+        bufReader = new BufferedReader(new InputStreamReader(fis, StandardCharsets.UTF_8));
 
         while (true) {
           String line = bufReader.readLine();

+ 1 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java

@@ -346,7 +346,7 @@ public class FSConfigToCSConfigConverter {
       if (!rulesToFile) {
         String json =
             ((ByteArrayOutputStream)mappingRulesOutputStream)
-            .toString(StandardCharsets.UTF_8.displayName());
+            .toString(StandardCharsets.UTF_8.name());
         capacitySchedulerConfig.setMappingRuleJson(json);
       }
     } else {
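
Note: Charset.displayName() is documented as the human-readable, potentially locale-sensitive name, while name() is the canonical registry name; for UTF-8 both return "UTF-8" in practice, so this is a contract fix rather than a behavior change. Sketch with an illustrative payload (on Java 10+, toString(Charset) would avoid the checked exception altogether):

    import java.io.ByteArrayOutputStream;
    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    public class CharsetNames {
      public static void main(String[] args) throws UnsupportedEncodingException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] json = "{\"rules\":[]}".getBytes(StandardCharsets.UTF_8); // illustrative
        out.write(json, 0, json.length);
        // toString(String) expects a canonical charset name, which name() supplies.
        System.out.println(out.toString(StandardCharsets.UTF_8.name()));
      }
    }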

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java

@@ -817,7 +817,7 @@ public class TestRMWebServicesAppsModification extends JerseyTestBase {
     HashMap<String, String> tokens = new HashMap<>();
     HashMap<String, String> secrets = new HashMap<>();
     secrets.put("secret1", Base64.encodeBase64String(
-        "mysecret".getBytes("UTF8")));
+        "mysecret".getBytes(StandardCharsets.UTF_8)));
     credentials.setSecrets(secrets);
     credentials.setTokens(tokens);
     ApplicationSubmissionContextInfo appInfo = new ApplicationSubmissionContextInfo();
@@ -840,7 +840,7 @@ public class TestRMWebServicesAppsModification extends JerseyTestBase {
     appInfo.getContainerLaunchContextInfo().setEnvironment(environment);
     appInfo.getContainerLaunchContextInfo().setAcls(acls);
     appInfo.getContainerLaunchContextInfo().getAuxillaryServiceData()
-      .put("test", Base64.encodeBase64URLSafeString("value12".getBytes("UTF8")));
+      .put("test", Base64.encodeBase64URLSafeString("value12".getBytes(StandardCharsets.UTF_8)));
     appInfo.getContainerLaunchContextInfo().setCredentials(credentials);
     appInfo.getResource().setMemory(1024);
     appInfo.getResource().setvCores(1);
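
Note: the old literal "UTF8" worked only because it is a registered alias of UTF-8; getBytes(String) also forces callers to declare or catch a checked UnsupportedEncodingException. A small sketch of both points (commons-codec Base64, as in the test):

    import java.nio.charset.Charset;
    import java.nio.charset.StandardCharsets;

    import org.apache.commons.codec.binary.Base64;

    public class AliasLookup {
      public static void main(String[] args) {
        // The alias resolves, but only after a registry lookup.
        System.out.println(Charset.forName("UTF8")); // prints UTF-8
        // The constant needs no lookup and no checked exception.
        System.out.println(Base64.encodeBase64String(
            "mysecret".getBytes(StandardCharsets.UTF_8)));
      }
    }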

+ 5 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java

@@ -31,6 +31,7 @@ import java.io.OutputStream;
 import java.io.StringWriter;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.concurrent.Callable;
@@ -250,7 +251,7 @@ public class TestRMWebServicesDelegationTokenAuthentication {
       InputStream errorStream = conn.getErrorStream();
       String error = "";
       BufferedReader reader = null;
-      reader = new BufferedReader(new InputStreamReader(errorStream, "UTF8"));
+      reader = new BufferedReader(new InputStreamReader(errorStream, StandardCharsets.UTF_8));
       for (String line; (line = reader.readLine()) != null;) {
         error += line;
       }
@@ -356,7 +357,7 @@ public class TestRMWebServicesDelegationTokenAuthentication {
         assertEquals(Status.OK.getStatusCode(), conn.getResponseCode());
         BufferedReader reader = null;
         try {
-          reader = new BufferedReader(new InputStreamReader(response, "UTF8"));
+          reader = new BufferedReader(new InputStreamReader(response, StandardCharsets.UTF_8));
           for (String line; (line = reader.readLine()) != null;) {
             JSONObject obj = new JSONObject(line);
             if (obj.has("token")) {
@@ -432,7 +433,7 @@ public class TestRMWebServicesDelegationTokenAuthentication {
         InputStream response = conn.getInputStream();
         assertEquals(Status.OK.getStatusCode(), conn.getResponseCode());
         try (BufferedReader reader = new BufferedReader(new InputStreamReader(
-            response, "UTF8"))) {
+            response, StandardCharsets.UTF_8))) {
           String line;
           while ((line = reader.readLine()) != null) {
             JSONObject obj = new JSONObject(line);
@@ -490,7 +491,7 @@ public class TestRMWebServicesDelegationTokenAuthentication {
       conn.setRequestProperty("Content-Type", contentType + ";charset=UTF8");
       if (body != null && !body.isEmpty()) {
         OutputStream stream = conn.getOutputStream();
-        stream.write(body.getBytes("UTF8"));
+        stream.write(body.getBytes(StandardCharsets.UTF_8));
         stream.close();
       }
     }

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesHttpStaticUserPermissions.java

@@ -28,6 +28,7 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.HashMap;
 
@@ -181,7 +182,7 @@ public class TestRMWebServicesHttpStaticUserPermissions {
         InputStream errorStream = conn.getErrorStream();
         String error = "";
         BufferedReader reader = new BufferedReader(
-            new InputStreamReader(errorStream, "UTF8"));
+            new InputStreamReader(errorStream, StandardCharsets.UTF_8));
         for (String line; (line = reader.readLine()) != null;) {
           error += line;
         }