
HADOOP-18957. Use StandardCharsets.UTF_8 (#6231). Contributed by PJ Fanning.

Signed-off-by: Ayush Saxena <ayushsaxena@apache.org>
PJ Fanning committed 2 years ago
Commit f609460bda
100 changed files with 270 additions and 268 deletions
  1. 2 2
      hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java
  2. 2 4
      hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java
  3. 2 2
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/StringSignerSecretProvider.java
  4. 5 5
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestZKSignerSecretProvider.java
  5. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
  6. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
  7. 7 7
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FileSystemMultipartUploader.java
  8. 2 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
  9. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
  10. 4 9
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
  11. 4 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java
  12. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
  13. 3 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
  14. 2 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPlainServer.java
  15. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
  16. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
  17. 2 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
  18. 2 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/ServletUtils.java
  19. 7 7
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java
  20. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java
  21. 4 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java
  22. 1 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
  23. 2 2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
  24. 2 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
  25. 4 4
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractMultipartUploaderTest.java
  26. 2 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java
  27. 3 3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java
  28. 2 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestIsActiveServlet.java
  29. 2 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java
  30. 3 3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
  31. 3 2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java
  32. 2 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java
  33. 3 3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestStatsDMetrics.java
  34. 6 6
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestTableMapping.java
  35. 2 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestZKDelegationTokenSecretManager.java
  36. 2 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java
  37. 2 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestPureJavaCrc32.java
  38. 2 2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestZKUtil.java
  39. 4 3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/curator/TestZKCuratorManager.java
  40. 2 3
      hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java
  41. 2 2
      hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java
  42. 2 1
      hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/RegistrySecurity.java
  43. 2 1
      hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
  44. 2 2
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java
  45. 5 5
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java
  46. 3 2
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
  47. 2 1
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java
  48. 1 2
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
  49. 2 1
      hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsContentLength.java
  50. 3 3
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
  51. 2 2
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
  52. 7 7
      hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
  53. 3 3
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
  54. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
  55. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
  56. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java
  57. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java
  58. 2 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/PmemVolumeManager.java
  59. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ExceptionHandler.java
  60. 3 3
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
  61. 3 3
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
  62. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
  63. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java
  64. 3 3
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
  65. 2 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java
  66. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java
  67. 4 9
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageTextWriter.java
  68. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java
  69. 3 3
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/MD5FileUtils.java
  70. 3 4
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java
  71. 3 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
  72. 2 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBalancerBandwidth.java
  73. 3 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java
  74. 3 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java
  75. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeReport.java
  76. 2 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultipleNNPortQOP.java
  77. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
  78. 2 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestSaslDataTransfer.java
  79. 3 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java
  80. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMetrics.java
  81. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFavoredNodesEndToEnd.java
  82. 11 11
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
  83. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestINodeFile.java
  84. 2 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java
  85. 5 5
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
  86. 3 3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdmin.java
  87. 4 4
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
  88. 3 2
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
  89. 2 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java
  90. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestHdfsConfigFields.java
  91. 2 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
  92. 3 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java
  93. 2 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
  94. 2 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java
  95. 2 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
  96. 2 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JSONHistoryViewerPrinter.java
  97. 3 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java
  98. 2 7
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedPartitioner.java
  99. 3 8
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldHelper.java
  100. 6 7
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
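
The change applied across all 100 files follows one pattern: runtime charset lookups (Charset.forName("UTF-8"), the String-based "UTF-8" overloads, and the shaded Guava constant org.apache.hadoop.thirdparty.com.google.common.base.Charsets.UTF_8) are replaced with the JDK constant java.nio.charset.StandardCharsets.UTF_8, which needs no lookup and no checked-exception handling. A minimal before/after sketch of the idea; the class and method names below are illustrative only and do not appear in the commit:

    import java.io.UnsupportedEncodingException;
    import java.nio.charset.Charset;
    import java.nio.charset.StandardCharsets;

    // Illustrative sketch; not a class from the Hadoop code base.
    public class Utf8Sketch {

      // Before: charset resolved by name at run time. The String overload also
      // declares a checked UnsupportedEncodingException that UTF-8 can never raise.
      static byte[] encodeByName(String s) throws UnsupportedEncodingException {
        return s.getBytes("UTF-8");
      }

      static byte[] encodeByLookup(String s) {
        return s.getBytes(Charset.forName("UTF-8"));
      }

      // After: StandardCharsets.UTF_8 is a constant, needs no lookup or exception
      // handling, and drops the dependency on the shaded Guava Charsets class.
      static byte[] encode(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
      }

      public static void main(String[] args) throws UnsupportedEncodingException {
        System.out.println(encode("h\u00e9llo").length);        // 6 bytes in UTF-8
        System.out.println(encodeByName("h\u00e9llo").length);  // same result
      }
    }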

+ 2 - 2
hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java

@@ -19,7 +19,7 @@ import java.io.BufferedReader;
 import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 /**
  * Example that uses <code>AuthenticatedURL</code>.
@@ -42,7 +42,7 @@ public class WhoClient {
       if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
         BufferedReader reader = new BufferedReader(
             new InputStreamReader(
-                conn.getInputStream(), Charset.forName("UTF-8")));
+                conn.getInputStream(), StandardCharsets.UTF_8));
         String line = reader.readLine();
         while (line != null) {
           System.out.println(line);

+ 2 - 4
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java

@@ -23,7 +23,7 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Properties;
 
@@ -53,8 +53,6 @@ public class PseudoAuthenticationHandler implements AuthenticationHandler {
    */
   public static final String ANONYMOUS_ALLOWED = TYPE + ".anonymous.allowed";
 
-  private static final Charset UTF8_CHARSET = Charset.forName("UTF-8");
-
   private static final String PSEUDO_AUTH = "PseudoAuth";
 
   private boolean acceptAnonymous;
@@ -146,7 +144,7 @@ public class PseudoAuthenticationHandler implements AuthenticationHandler {
     if(queryString == null || queryString.length() == 0) {
       return null;
     }
-    List<NameValuePair> list = URLEncodedUtils.parse(queryString, UTF8_CHARSET);
+    List<NameValuePair> list = URLEncodedUtils.parse(queryString, StandardCharsets.UTF_8);
     if (list != null) {
       for (NameValuePair nv : list) {
         if (PseudoAuthenticator.USER_NAME.equals(nv.getName())) {

+ 2 - 2
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/StringSignerSecretProvider.java

@@ -13,7 +13,7 @@
  */
 package org.apache.hadoop.security.authentication.util;
 
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Properties;
 import javax.servlet.ServletContext;
 
@@ -38,7 +38,7 @@ class StringSignerSecretProvider extends SignerSecretProvider {
           long tokenValidity) throws Exception {
     String signatureSecret = config.getProperty(
             AuthenticationFilter.SIGNATURE_SECRET, null);
-    secret = signatureSecret.getBytes(Charset.forName("UTF-8"));
+    secret = signatureSecret.getBytes(StandardCharsets.UTF_8);
     secrets = new byte[][]{secret};
   }
 

+ 5 - 5
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestZKSignerSecretProvider.java

@@ -13,7 +13,7 @@
  */
 package org.apache.hadoop.security.authentication.util;
 
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Properties;
 import java.util.Random;
 import javax.servlet.ServletContext;
@@ -140,11 +140,11 @@ public class TestZKSignerSecretProvider {
     long seed = System.currentTimeMillis();
     Random rand = new Random(seed);
     byte[] secret2 = Long.toString(rand.nextLong())
-        .getBytes(Charset.forName("UTF-8"));
+        .getBytes(StandardCharsets.UTF_8);
     byte[] secret1 = Long.toString(rand.nextLong())
-        .getBytes(Charset.forName("UTF-8"));
+        .getBytes(StandardCharsets.UTF_8);
     byte[] secret3 = Long.toString(rand.nextLong())
-        .getBytes(Charset.forName("UTF-8"));
+        .getBytes(StandardCharsets.UTF_8);
     rand = new Random(seed);
     // Secrets 4 and 5 get thrown away by ZK when the new secret provider tries
     // to init
@@ -238,7 +238,7 @@ public class TestZKSignerSecretProvider {
 
     @Override
     protected byte[] generateRandomSecret() {
-      return Long.toString(rand.nextLong()).getBytes(Charset.forName("UTF-8"));
+      return Long.toString(rand.nextLong()).getBytes(StandardCharsets.UTF_8);
     }
   }
 

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

@@ -43,6 +43,7 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLConnection;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -82,7 +83,6 @@ import javax.xml.transform.TransformerFactory;
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamResult;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.commons.collections.map.UnmodifiableMap;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -2903,7 +2903,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
         LOG.info("found resource " + name + " at " + url);
       }
 
-      return new InputStreamReader(url.openStream(), Charsets.UTF_8);
+      return new InputStreamReader(url.openStream(), StandardCharsets.UTF_8);
     } catch (Exception e) {
       return null;
     }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java

@@ -899,7 +899,7 @@ public class FileUtil {
             try (BufferedReader reader =
                      new BufferedReader(
                          new InputStreamReader(process.getInputStream(),
-                             Charset.forName("UTF-8")))) {
+                             StandardCharsets.UTF_8))) {
               String line;
               while((line = reader.readLine()) != null) {
                 LOG.debug(line);
@@ -922,7 +922,7 @@ public class FileUtil {
             try (BufferedReader reader =
                      new BufferedReader(
                          new InputStreamReader(process.getErrorStream(),
-                             Charset.forName("UTF-8")))) {
+                             StandardCharsets.UTF_8))) {
               String line;
               while((line = reader.readLine()) != null) {
                 LOG.debug(line);

+ 7 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FileSystemMultipartUploader.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.impl;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.HashSet;
@@ -30,7 +31,6 @@ import java.util.UUID;
 import java.util.concurrent.CompletableFuture;
 import java.util.stream.Collectors;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -104,7 +104,7 @@ public class FileSystemMultipartUploader extends AbstractMultipartUploader {
       fs.mkdirs(collectorPath, FsPermission.getDirDefault());
 
       ByteBuffer byteBuffer = ByteBuffer.wrap(
-          collectorPath.toString().getBytes(Charsets.UTF_8));
+          collectorPath.toString().getBytes(StandardCharsets.UTF_8));
       return BBUploadHandle.from(byteBuffer);
     });
   }
@@ -130,7 +130,7 @@ public class FileSystemMultipartUploader extends AbstractMultipartUploader {
     byte[] uploadIdByteArray = uploadId.toByteArray();
     checkUploadId(uploadIdByteArray);
     Path collectorPath = new Path(new String(uploadIdByteArray, 0,
-        uploadIdByteArray.length, Charsets.UTF_8));
+        uploadIdByteArray.length, StandardCharsets.UTF_8));
     Path partPath =
         mergePaths(collectorPath, mergePaths(new Path(Path.SEPARATOR),
             new Path(partNumber + ".part")));
@@ -149,7 +149,7 @@ public class FileSystemMultipartUploader extends AbstractMultipartUploader {
       cleanupWithLogger(LOG, inputStream);
     }
     return BBPartHandle.from(ByteBuffer.wrap(
-        partPath.toString().getBytes(Charsets.UTF_8)));
+        partPath.toString().getBytes(StandardCharsets.UTF_8)));
   }
 
   private Path createCollectorPath(Path filePath) {
@@ -210,7 +210,7 @@ public class FileSystemMultipartUploader extends AbstractMultipartUploader {
         .map(pair -> {
           byte[] byteArray = pair.getValue().toByteArray();
           return new Path(new String(byteArray, 0, byteArray.length,
-              Charsets.UTF_8));
+              StandardCharsets.UTF_8));
         })
         .collect(Collectors.toList());
 
@@ -223,7 +223,7 @@ public class FileSystemMultipartUploader extends AbstractMultipartUploader {
         "Duplicate PartHandles");
     byte[] uploadIdByteArray = multipartUploadId.toByteArray();
     Path collectorPath = new Path(new String(uploadIdByteArray, 0,
-        uploadIdByteArray.length, Charsets.UTF_8));
+        uploadIdByteArray.length, StandardCharsets.UTF_8));
 
     boolean emptyFile = totalPartsLen(partHandles) == 0;
     if (emptyFile) {
@@ -250,7 +250,7 @@ public class FileSystemMultipartUploader extends AbstractMultipartUploader {
     byte[] uploadIdByteArray = uploadId.toByteArray();
     checkUploadId(uploadIdByteArray);
     Path collectorPath = new Path(new String(uploadIdByteArray, 0,
-        uploadIdByteArray.length, Charsets.UTF_8));
+        uploadIdByteArray.length, StandardCharsets.UTF_8));
 
     return FutureIO.eval(() -> {
       // force a check for a file existing; raises FNFE if not found

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java

@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.Iterator;
 import java.util.LinkedList;
@@ -114,7 +115,7 @@ class CopyCommands {
 
     private void writeDelimiter(FSDataOutputStream out) throws IOException {
       if (delimiter != null) {
-        out.write(delimiter.getBytes("UTF-8"));
+        out.write(delimiter.getBytes(StandardCharsets.UTF_8));
       }
     }
 

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java

@@ -120,7 +120,7 @@ public class HtmlQuoting {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
       try {
         quoteHtmlChars(buffer, bytes, 0, bytes.length);
-        return buffer.toString("UTF-8");
+        return new String(buffer.toByteArray(), StandardCharsets.UTF_8);
       } catch (IOException ioe) {
         // Won't happen, since it is a bytearrayoutputstream
         return null;

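A side note on the ByteArrayOutputStream call sites (HtmlQuoting above, MetricsConfig and TestFileSink below): toString("UTF-8") declares a checked UnsupportedEncodingException, so the commit decodes the byte array explicitly instead. Java 10 added a toString(Charset) overload, but the explicit form keeps the code compatible with Java 8. A small sketch, with a hypothetical helper name:

    import java.io.ByteArrayOutputStream;
    import java.nio.charset.StandardCharsets;

    // Illustrative helper; not a class from the Hadoop code base.
    final class BufferDecode {
      // Equivalent to buffer.toString("UTF-8"), minus the checked exception.
      // On Java 10+ this could also be buffer.toString(StandardCharsets.UTF_8).
      static String utf8(ByteArrayOutputStream buffer) {
        return new String(buffer.toByteArray(), StandardCharsets.UTF_8);
      }
    }
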
+ 4 - 9
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java

@@ -20,7 +20,6 @@ package org.apache.hadoop.io;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
-import java.nio.charset.UnsupportedCharsetException;
 import java.util.ArrayList;
 
 import org.apache.commons.codec.binary.Base64;
@@ -75,14 +74,10 @@ public class DefaultStringifier<T> implements Stringifier<T> {
 
   @Override
   public T fromString(String str) throws IOException {
-    try {
-      byte[] bytes = Base64.decodeBase64(str.getBytes("UTF-8"));
-      inBuf.reset(bytes, bytes.length);
-      T restored = deserializer.deserialize(null);
-      return restored;
-    } catch (UnsupportedCharsetException ex) {
-      throw new IOException(ex.toString());
-    }
+    byte[] bytes = Base64.decodeBase64(str.getBytes(StandardCharsets.UTF_8));
+    inBuf.reset(bytes, bytes.length);
+    T restored = deserializer.deserialize(null);
+    return restored;
   }
 
   @Override

+ 4 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java

@@ -25,6 +25,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.ReflectionUtils;
 
+import java.nio.charset.StandardCharsets;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
 
@@ -86,12 +87,12 @@ public final class WritableUtils  {
   public static String readCompressedString(DataInput in) throws IOException {
     byte[] bytes = readCompressedByteArray(in);
     if (bytes == null) return null;
-    return new String(bytes, "UTF-8");
+    return new String(bytes, StandardCharsets.UTF_8);
   }
 
 
   public static int  writeCompressedString(DataOutput out, String s) throws IOException {
-    return writeCompressedByteArray(out, (s != null) ? s.getBytes("UTF-8") : null);
+    return writeCompressedByteArray(out, (s != null) ? s.getBytes(StandardCharsets.UTF_8) : null);
   }
 
   /*
@@ -103,7 +104,7 @@ public final class WritableUtils  {
    */
   public static void writeString(DataOutput out, String s) throws IOException {
     if (s != null) {
-      byte[] buffer = s.getBytes("UTF-8");
+      byte[] buffer = s.getBytes(StandardCharsets.UTF_8);
       int len = buffer.length;
       out.writeInt(len);
       out.write(buffer, 0, len);

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java

@@ -23,6 +23,7 @@ import java.io.InputStreamReader;
 import java.io.PrintWriter;
 import java.net.URL;
 import java.net.URLConnection;
+import java.nio.charset.StandardCharsets;
 import java.util.regex.Pattern;
 
 import javax.net.ssl.HttpsURLConnection;
@@ -33,7 +34,6 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -297,7 +297,7 @@ public class LogLevel {
 
       // read from the servlet
       BufferedReader in = new BufferedReader(
-          new InputStreamReader(connection.getInputStream(), Charsets.UTF_8));
+          new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8));
       for (String line;;) {
         line = in.readLine();
         if (line == null) {

+ 3 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java

@@ -23,6 +23,8 @@ import java.io.PrintWriter;
 import java.net.URL;
 import java.net.URLClassLoader;
 import static java.security.AccessController.*;
+
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedAction;
 import java.util.Iterator;
 import java.util.Map;
@@ -289,7 +291,7 @@ class MetricsConfig extends SubsetConfiguration {
       PropertiesConfiguration tmp = new PropertiesConfiguration();
       tmp.copy(c);
       tmp.write(pw);
-      return buffer.toString("UTF-8");
+      return new String(buffer.toByteArray(), StandardCharsets.UTF_8);
     } catch (Exception e) {
       throw new MetricsConfigException(e);
     }

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPlainServer.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.security;
 
+import java.nio.charset.StandardCharsets;
 import java.security.Provider;
 import java.util.Map;
 
@@ -82,7 +83,7 @@ public class SaslPlainServer implements SaslServer {
     try {
       String payload;
       try {
-        payload = new String(response, "UTF-8");
+        payload = new String(response, StandardCharsets.UTF_8);
       } catch (Exception e) {
         throw new IllegalArgumentException("Received corrupt response", e);
       }

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java

@@ -24,7 +24,6 @@ import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.ProviderUtils;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -33,6 +32,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.security.GeneralSecurityException;
 import java.security.KeyStore;
 import java.security.KeyStoreException;
@@ -199,7 +199,7 @@ public abstract class AbstractJavaKeyStoreProvider extends CredentialProvider {
 
   public static char[] bytesToChars(byte[] bytes) throws IOException {
     String pass;
-    pass = new String(bytes, Charsets.UTF_8);
+    pass = new String(bytes, StandardCharsets.UTF_8);
     return pass.toCharArray();
   }
 
@@ -268,7 +268,7 @@ public abstract class AbstractJavaKeyStoreProvider extends CredentialProvider {
     writeLock.lock();
     try {
       keyStore.setKeyEntry(alias,
-          new SecretKeySpec(new String(material).getBytes("UTF-8"),
+          new SecretKeySpec(new String(material).getBytes(StandardCharsets.UTF_8),
               getAlgorithm()), password, null);
     } catch (KeyStoreException e) {
       throw new IOException("Can't store credential " + alias + " in " + this,

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java

@@ -70,7 +70,7 @@ public class UserProvider extends CredentialProvider {
           " already exists in " + this);
     }
     credentials.addSecretKey(new Text(name), 
-        new String(credential).getBytes("UTF-8"));
+        new String(credential).getBytes(StandardCharsets.UTF_8));
     return new CredentialEntry(name, credential);
   }
 

+ 2 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java

@@ -51,7 +51,7 @@ import javax.servlet.http.HttpServletRequestWrapper;
 import javax.servlet.http.HttpServletResponse;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.security.Principal;
 import java.util.Enumeration;
 import java.util.List;
@@ -94,8 +94,6 @@ public class DelegationTokenAuthenticationFilter
   public static final String DELEGATION_TOKEN_SECRET_MANAGER_ATTR =
       "hadoop.http.delegation-token-secret-manager";
 
-  private static final Charset UTF8_CHARSET = Charset.forName("UTF-8");
-
   private static final ThreadLocal<UserGroupInformation> UGI_TL =
       new ThreadLocal<UserGroupInformation>();
   public static final String PROXYUSER_PREFIX = "proxyuser";
@@ -226,7 +224,7 @@ public class DelegationTokenAuthenticationFilter
     if (queryString == null) {
       return null;
     }
-    List<NameValuePair> list = URLEncodedUtils.parse(queryString, UTF8_CHARSET);
+    List<NameValuePair> list = URLEncodedUtils.parse(queryString, StandardCharsets.UTF_8);
     if (list != null) {
       for (NameValuePair nv : list) {
         if (DelegationTokenAuthenticatedURL.DO_AS.

+ 2 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/ServletUtils.java

@@ -23,7 +23,7 @@ import org.apache.http.client.utils.URLEncodedUtils;
 
 import javax.servlet.http.HttpServletRequest;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
 /**
@@ -31,7 +31,6 @@ import java.util.List;
  */
 @InterfaceAudience.Private
 class ServletUtils {
-  private static final Charset UTF8_CHARSET = Charset.forName("UTF-8");
 
   /**
    * Extract a query string parameter without triggering http parameters
@@ -49,7 +48,7 @@ class ServletUtils {
     if (queryString == null) {
       return null;
     }
-    List<NameValuePair> list = URLEncodedUtils.parse(queryString, UTF8_CHARSET);
+    List<NameValuePair> list = URLEncodedUtils.parse(queryString, StandardCharsets.UTF_8);
     if (list != null) {
       for (NameValuePair nv : list) {
         if (name.equals(nv.getName())) {

+ 7 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java

@@ -22,7 +22,7 @@ import java.io.BufferedReader;
 import java.io.InputStreamReader;
 import java.io.IOException;
 import java.math.BigInteger;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.HashMap;
@@ -247,7 +247,7 @@ public class SysInfoLinux extends SysInfo {
     try {
       fReader = new InputStreamReader(
           Files.newInputStream(Paths.get(procfsMemFile)),
-          Charset.forName("UTF-8"));
+          StandardCharsets.UTF_8);
       in = new BufferedReader(fReader);
     } catch (IOException f) {
       // shouldn't happen....
@@ -319,7 +319,7 @@ public class SysInfoLinux extends SysInfo {
     try {
       fReader =
           new InputStreamReader(Files.newInputStream(Paths.get(procfsCpuFile)),
-              Charset.forName("UTF-8"));
+              StandardCharsets.UTF_8);
       in = new BufferedReader(fReader);
     } catch (IOException f) {
       // shouldn't happen....
@@ -380,7 +380,7 @@ public class SysInfoLinux extends SysInfo {
     try {
       fReader = new InputStreamReader(
           Files.newInputStream(Paths.get(procfsStatFile)),
-          Charset.forName("UTF-8"));
+          StandardCharsets.UTF_8);
       in = new BufferedReader(fReader);
     } catch (IOException f) {
       // shouldn't happen....
@@ -435,7 +435,7 @@ public class SysInfoLinux extends SysInfo {
     try {
       fReader = new InputStreamReader(
           Files.newInputStream(Paths.get(procfsNetFile)),
-          Charset.forName("UTF-8"));
+          StandardCharsets.UTF_8);
       in = new BufferedReader(fReader);
     } catch (IOException f) {
       return;
@@ -490,7 +490,7 @@ public class SysInfoLinux extends SysInfo {
     try {
       in = new BufferedReader(new InputStreamReader(
           Files.newInputStream(Paths.get(procfsDisksFile)),
-          Charset.forName("UTF-8")));
+          StandardCharsets.UTF_8));
     } catch (IOException f) {
       return;
     }
@@ -558,7 +558,7 @@ public class SysInfoLinux extends SysInfo {
     try {
       in = new BufferedReader(new InputStreamReader(
           Files.newInputStream(Paths.get(procfsDiskSectorFile)),
-              Charset.forName("UTF-8")));
+              StandardCharsets.UTF_8));
     } catch (IOException f) {
       return defSector;
     }

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.util;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -27,7 +28,6 @@ import org.apache.zookeeper.ZooDefs;
 import org.apache.zookeeper.data.ACL;
 import org.apache.zookeeper.data.Id;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.thirdparty.com.google.common.base.Splitter;
 import org.apache.hadoop.thirdparty.com.google.common.io.Files;
 
@@ -148,7 +148,7 @@ public class ZKUtil {
             "Auth '" + comp + "' not of expected form scheme:auth");
       }
       ret.add(new ZKAuthInfo(parts[0],
-          parts[1].getBytes(Charsets.UTF_8)));
+          parts[1].getBytes(StandardCharsets.UTF_8)));
     }
     return ret;
   }
@@ -172,7 +172,7 @@ public class ZKUtil {
       return valInConf;
     }
     String path = valInConf.substring(1).trim();
-    return Files.asCharSource(new File(path), Charsets.UTF_8).read().trim();
+    return Files.asCharSource(new File(path), StandardCharsets.UTF_8).read().trim();
   }
 
   /**

+ 4 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java

@@ -18,7 +18,7 @@
 package org.apache.hadoop.util.curator;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
@@ -260,7 +260,7 @@ public final class ZKCuratorManager {
   public String getStringData(final String path) throws Exception {
     byte[] bytes = getData(path);
     if (bytes != null) {
-      return new String(bytes, Charset.forName("UTF-8"));
+      return new String(bytes, StandardCharsets.UTF_8);
     }
     return null;
   }
@@ -275,7 +275,7 @@ public final class ZKCuratorManager {
   public String getStringData(final String path, Stat stat) throws Exception {
     byte[] bytes = getData(path, stat);
     if (bytes != null) {
-      return new String(bytes, Charset.forName("UTF-8"));
+      return new String(bytes, StandardCharsets.UTF_8);
     }
     return null;
   }
@@ -299,7 +299,7 @@ public final class ZKCuratorManager {
    * @throws Exception If it cannot contact Zookeeper.
    */
   public void setData(String path, String data, int version) throws Exception {
-    byte[] bytes = data.getBytes(Charset.forName("UTF-8"));
+    byte[] bytes = data.getBytes(StandardCharsets.UTF_8);
     setData(path, bytes, version);
   }
 

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java

@@ -60,7 +60,7 @@ public class TestCommonConfigurationFields extends TestConfigurationFieldsBase {
   @SuppressWarnings("deprecation")
   @Override
   public void initializeMemberVariables() {
-    xmlFilename = new String("core-default.xml");
+    xmlFilename = "core-default.xml";
     configurationClasses = new Class[] {
         CommonConfigurationKeys.class,
         CommonConfigurationKeysPublic.class,

+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java

@@ -698,7 +698,7 @@ public class TestFileUtil {
     OutputStream os = new FileOutputStream(simpleTar);
     try (TarOutputStream tos = new TarOutputStream(os)) {
       TarEntry te = new TarEntry("/bar/foo");
-      byte[] data = "some-content".getBytes("UTF-8");
+      byte[] data = "some-content".getBytes(StandardCharsets.UTF_8);
       te.setSize(data.length);
       tos.putNextEntry(te);
       tos.write(data);
@@ -782,7 +782,7 @@ public class TestFileUtil {
         ZipArchiveList.add(new ZipArchiveEntry("foo_" + i));
         ZipArchiveEntry archiveEntry = ZipArchiveList.get(i);
         archiveEntry.setUnixMode(count += 0100);
-        byte[] data = "some-content".getBytes("UTF-8");
+        byte[] data = "some-content".getBytes(StandardCharsets.UTF_8);
         archiveEntry.setSize(data.length);
         tos.putArchiveEntry(archiveEntry);
         tos.write(data);

+ 2 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java

@@ -30,6 +30,7 @@ import org.junit.Test;
 import java.io.File;
 import java.io.IOException;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.HashSet;
 import java.util.Set;
 
@@ -117,7 +118,7 @@ public class TestHarFileSystemBasics {
     final FSDataOutputStream fsdos = localFileSystem.create(masterIndexPath);
     try {
       String versionString = version + "\n";
-      fsdos.write(versionString.getBytes("UTF-8"));
+      fsdos.write(versionString.getBytes(StandardCharsets.UTF_8));
       fsdos.flush();
     } finally {
       fsdos.close();

+ 4 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractMultipartUploaderTest.java

@@ -22,13 +22,13 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
 import java.util.concurrent.CompletableFuture;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.assertj.core.api.Assertions;
 import org.junit.Assume;
 import org.junit.Test;
@@ -596,8 +596,8 @@ public abstract class AbstractContractMultipartUploaderTest extends
     abortUpload(uploadHandle, file);
 
     String contents = "ThisIsPart49\n";
-    int len = contents.getBytes(Charsets.UTF_8).length;
-    InputStream is = IOUtils.toInputStream(contents, "UTF-8");
+    int len = contents.getBytes(StandardCharsets.UTF_8).length;
+    InputStream is = IOUtils.toInputStream(contents, StandardCharsets.UTF_8);
 
     intercept(IOException.class,
         () -> awaitFuture(
@@ -624,7 +624,7 @@ public abstract class AbstractContractMultipartUploaderTest extends
   public void testAbortUnknownUpload() throws Exception {
     Path file = methodPath();
     ByteBuffer byteBuffer = ByteBuffer.wrap(
-        "invalid-handle".getBytes(Charsets.UTF_8));
+        "invalid-handle".getBytes(StandardCharsets.UTF_8));
     intercept(FileNotFoundException.class,
         () -> abortUpload(BBUploadHandle.from(byteBuffer), file));
   }

+ 2 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java

@@ -45,6 +45,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -806,7 +807,7 @@ public class ContractTestUtils extends Assert {
     try (FSDataInputStream in = fs.open(path)) {
       byte[] buf = new byte[length];
       in.readFully(0, buf);
-      return new String(buf, "UTF-8");
+      return new String(buf, StandardCharsets.UTF_8);
     }
   }
 

+ 3 - 3
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java

@@ -23,6 +23,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.net.InetSocketAddress;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
@@ -30,7 +31,6 @@ import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
 import org.junit.Before;
 import org.junit.Test;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -97,8 +97,8 @@ public class TestHAAdmin {
     outBytes.reset();
     LOG.info("Running: HAAdmin " + Joiner.on(" ").join(args));
     int ret = tool.run(args);
-    errOutput = new String(errOutBytes.toByteArray(), Charsets.UTF_8);
-    output = new String(outBytes.toByteArray(), Charsets.UTF_8);
+    errOutput = new String(errOutBytes.toByteArray(), StandardCharsets.UTF_8);
+    output = new String(outBytes.toByteArray(), StandardCharsets.UTF_8);
     LOG.info("Err_output:\n" + errOutput + "\nOutput:\n" + output);
     return ret;
   }

+ 2 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestIsActiveServlet.java

@@ -27,6 +27,7 @@ import javax.servlet.http.HttpServletResponse;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 
 import static org.junit.Assert.assertEquals;
 import static org.mockito.ArgumentMatchers.anyInt;
@@ -90,6 +91,6 @@ public class TestIsActiveServlet {
 
   private String doGet() throws IOException {
     servlet.doGet(req, resp);
-    return new String(respOut.toByteArray(), "UTF-8");
+    return new String(respOut.toByteArray(), StandardCharsets.UTF_8);
   }
 }

+ 2 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java

@@ -23,6 +23,7 @@ import static org.junit.Assume.assumeTrue;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -57,7 +58,7 @@ public class TestSecureIOUtils {
     for (File f : new File[] { testFilePathIs, testFilePathRaf,
         testFilePathFadis }) {
       FileOutputStream fos = new FileOutputStream(f);
-      fos.write("hello".getBytes("UTF-8"));
+      fos.write("hello".getBytes(StandardCharsets.UTF_8));
       fos.close();
     }
 

+ 3 - 3
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java

@@ -22,8 +22,8 @@ import java.io.IOException;
 import java.nio.BufferUnderflowException;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
+import java.nio.charset.StandardCharsets;
 import java.util.Random;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Bytes;
 import org.junit.Test;
 
@@ -105,7 +105,7 @@ public class TestText {
       ByteBuffer bb = Text.encode(before);
           
       byte[] utf8Text = bb.array();
-      byte[] utf8Java = before.getBytes("UTF-8");
+      byte[] utf8Java = before.getBytes(StandardCharsets.UTF_8);
       assertEquals(0, WritableComparator.compareBytes(
               utf8Text, 0, bb.limit(),
               utf8Java, 0, utf8Java.length));
@@ -392,7 +392,7 @@ public class TestText {
   @Test
   public void testReadWithKnownLength() throws IOException {
     String line = "hello world";
-    byte[] inputBytes = line.getBytes(Charsets.UTF_8);
+    byte[] inputBytes = line.getBytes(StandardCharsets.UTF_8);
     DataInputBuffer in = new DataInputBuffer();
     Text text = new Text();
 

+ 3 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java

@@ -23,6 +23,7 @@ import java.io.DataInputStream;
 import java.io.IOException;
 import java.io.UTFDataFormatException;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.Random;
 
 import org.apache.hadoop.test.GenericTestUtils;
@@ -110,7 +111,7 @@ public class TestUTF8 {
     DataOutputBuffer dob = new DataOutputBuffer();
     new UTF8(s).write(dob);
 
-    assertEquals(s, new String(dob.getData(), 2, dob.getLength()-2, "UTF-8"));
+    assertEquals(s, new String(dob.getData(), 2, dob.getLength()-2, StandardCharsets.UTF_8));
   }
 
   /**
@@ -125,7 +126,7 @@ public class TestUTF8 {
     String catFace = "\uD83D\uDC31";
 
     // This encodes to 4 bytes in UTF-8:
-    byte[] encoded = catFace.getBytes("UTF-8");
+    byte[] encoded = catFace.getBytes(StandardCharsets.UTF_8);
     assertEquals(4, encoded.length);
     assertEquals("f09f90b1", StringUtils.byteToHexString(encoded));
 

+ 2 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java

@@ -23,6 +23,7 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.regex.Pattern;
 
 import org.apache.hadoop.io.IOUtils;
@@ -113,7 +114,7 @@ public class TestFileSink {
       is = new FileInputStream(outFile);
       baos = new ByteArrayOutputStream((int)outFile.length());
       IOUtils.copyBytes(is, baos, 1024, true);
-      outFileContent = new String(baos.toByteArray(), "UTF-8");
+      outFileContent = new String(baos.toByteArray(), StandardCharsets.UTF_8);
     } finally {
       IOUtils.cleanupWithLogger(null, baos, is);
     }

+ 3 - 3
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestStatsDMetrics.java

@@ -25,7 +25,7 @@ import static org.mockito.Mockito.when;
 import java.io.IOException;
 import java.net.DatagramPacket;
 import java.net.DatagramSocket;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
@@ -75,7 +75,7 @@ public class TestStatsDMetrics {
       sock.receive(p);
 
       String result =new String(p.getData(), 0, p.getLength(),
-          Charset.forName("UTF-8"));
+          StandardCharsets.UTF_8);
       assertTrue(
           "Received data did not match data sent",
           result.equals("host.process.jvm.Context.foo1:1.25|c") ||
@@ -109,7 +109,7 @@ public class TestStatsDMetrics {
       sink.putMetrics(record);
       sock.receive(p);
       String result =
-          new String(p.getData(), 0, p.getLength(), Charset.forName("UTF-8"));
+          new String(p.getData(), 0, p.getLength(), StandardCharsets.UTF_8);
 
       assertTrue("Received data did not match data sent",
           result.equals("process.jvm.Context.foo1:1|c") ||

+ 6 - 6
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestTableMapping.java

@@ -21,11 +21,11 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.NET_TOPOLOGY_TA
 
 import static org.junit.Assert.assertEquals;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.thirdparty.com.google.common.io.Files;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -41,7 +41,7 @@ public class TestTableMapping {
   public void testResolve() throws IOException {
     File mapFile = File.createTempFile(getClass().getSimpleName() +
         ".testResolve", ".txt");
-    Files.asCharSink(mapFile, Charsets.UTF_8).write(
+    Files.asCharSink(mapFile, StandardCharsets.UTF_8).write(
         hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
     mapFile.deleteOnExit();
     TableMapping mapping = new TableMapping();
@@ -64,7 +64,7 @@ public class TestTableMapping {
   public void testTableCaching() throws IOException {
     File mapFile = File.createTempFile(getClass().getSimpleName() +
         ".testTableCaching", ".txt");
-    Files.asCharSink(mapFile, Charsets.UTF_8).write(
+    Files.asCharSink(mapFile, StandardCharsets.UTF_8).write(
         hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
     mapFile.deleteOnExit();
     TableMapping mapping = new TableMapping();
@@ -128,7 +128,7 @@ public class TestTableMapping {
   public void testClearingCachedMappings() throws IOException {
     File mapFile = File.createTempFile(getClass().getSimpleName() +
         ".testClearingCachedMappings", ".txt");
-    Files.asCharSink(mapFile, Charsets.UTF_8).write(
+    Files.asCharSink(mapFile, StandardCharsets.UTF_8).write(
         hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
     mapFile.deleteOnExit();
 
@@ -147,7 +147,7 @@ public class TestTableMapping {
     assertEquals("/rack1", result.get(0));
     assertEquals("/rack2", result.get(1));
 
-    Files.asCharSink(mapFile, Charsets.UTF_8).write("");
+    Files.asCharSink(mapFile, StandardCharsets.UTF_8).write("");
 
     mapping.reloadCachedMappings();
 
@@ -166,7 +166,7 @@ public class TestTableMapping {
   public void testBadFile() throws IOException {
     File mapFile = File.createTempFile(getClass().getSimpleName() +
         ".testBadFile", ".txt");
-    Files.asCharSink(mapFile, Charsets.UTF_8).write("bad contents");
+    Files.asCharSink(mapFile, StandardCharsets.UTF_8).write("bad contents");
     mapFile.deleteOnExit();
     TableMapping mapping = new TableMapping();
 

+ 2 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestZKDelegationTokenSecretManager.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.security.token.delegation;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -400,7 +401,7 @@ public class TestZKDelegationTokenSecretManager {
         .connectString(connectString)
         .retryPolicy(retryPolicy)
         .aclProvider(digestAclProvider)
-        .authorization("digest", userPass.getBytes("UTF-8"))
+        .authorization("digest", userPass.getBytes(StandardCharsets.UTF_8))
         .build();
     curatorFramework.start();
     ZKDelegationTokenSecretManager.setCurator(curatorFramework);

+ 2 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java

@@ -24,6 +24,7 @@ import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.jar.Attributes;
 import java.util.jar.JarFile;
 import java.util.jar.Manifest;
@@ -46,7 +47,7 @@ public class TestClasspath {
       .class);
   private static final File TEST_DIR = GenericTestUtils.getTestDir(
       "TestClasspath");
-  private static final Charset UTF8 = Charset.forName("UTF-8");
+  private static final Charset UTF8 = StandardCharsets.UTF_8;
 
   static {
     ExitUtil.disableSystemExit();

+ 2 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestPureJavaCrc32.java

@@ -21,6 +21,7 @@ import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.PrintStream;
 import java.lang.reflect.Constructor;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
@@ -49,7 +50,7 @@ public class TestPureJavaCrc32 {
 
     checkOnBytes(new byte[] {40, 60, 97, -70}, false);
     
-    checkOnBytes("hello world!".getBytes("UTF-8"), false);
+    checkOnBytes("hello world!".getBytes(StandardCharsets.UTF_8), false);
 
     for (int i = 0; i < 10000; i++) {
       byte randomBytes[] = new byte[new Random().nextInt(2048)];

+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestZKUtil.java

@@ -22,6 +22,7 @@ import static org.junit.Assert.*;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
 import org.apache.hadoop.test.GenericTestUtils;
@@ -31,7 +32,6 @@ import org.apache.zookeeper.ZooDefs.Perms;
 import org.apache.zookeeper.data.ACL;
 import org.junit.Test;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.thirdparty.com.google.common.io.Files;
 
 public class TestZKUtil {
@@ -131,7 +131,7 @@ public class TestZKUtil {
     assertEquals("x", ZKUtil.resolveConfIndirection("x"));
     
     TEST_FILE.getParentFile().mkdirs();
-    Files.asCharSink(TEST_FILE, Charsets.UTF_8).write("hello world");
+    Files.asCharSink(TEST_FILE, StandardCharsets.UTF_8).write("hello world");
     assertEquals("hello world", ZKUtil.resolveConfIndirection(
         "@" + TEST_FILE.getAbsolutePath()));
     

+ 4 - 3
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/curator/TestZKCuratorManager.java

@@ -22,6 +22,7 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -117,7 +118,7 @@ public class TestZKCuratorManager {
     curator.create(node1);
     assertNull(curator.getStringData(node1));
 
-    byte[] setData = "setData".getBytes("UTF-8");
+    byte[] setData = "setData".getBytes(StandardCharsets.UTF_8);
     curator.setData(node1, setData, -1);
     assertEquals("setData", curator.getStringData(node1));
 
@@ -136,7 +137,7 @@ public class TestZKCuratorManager {
     String fencingNodePath = "/fencing";
     String node1 = "/node1";
     String node2 = "/node2";
-    byte[] testData = "testData".getBytes("UTF-8");
+    byte[] testData = "testData".getBytes(StandardCharsets.UTF_8);
     assertFalse(curator.exists(fencingNodePath));
     assertFalse(curator.exists(node1));
     assertFalse(curator.exists(node2));
@@ -154,7 +155,7 @@ public class TestZKCuratorManager {
     assertTrue(Arrays.equals(testData, curator.getData(node1)));
     assertTrue(Arrays.equals(testData, curator.getData(node2)));
 
-    byte[] setData = "setData".getBytes("UTF-8");
+    byte[] setData = "setData".getBytes(StandardCharsets.UTF_8);
     txn = curator.createTransaction(zkAcl, fencingNodePath);
     txn.setData(node1, setData, -1);
     txn.delete(node2);

+ 2 - 3
hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java

@@ -33,7 +33,7 @@ import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.lang.annotation.Annotation;
 import java.lang.reflect.Type;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
 
@@ -64,8 +64,7 @@ public class KMSJSONWriter implements MessageBodyWriter<Object> {
       Annotation[] annotations, MediaType mediaType,
       MultivaluedMap<String, Object> stringObjectMultivaluedMap,
       OutputStream outputStream) throws IOException, WebApplicationException {
-    Writer writer = new OutputStreamWriter(outputStream, Charset
-        .forName("UTF-8"));
+    Writer writer = new OutputStreamWriter(outputStream, StandardCharsets.UTF_8);
     JsonSerialization.writer().writeValue(writer, obj);
   }
 

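Charset.forName("UTF-8") performs a run-time lookup and, for arbitrary names, can fail with an unchecked UnsupportedCharsetException; StandardCharsets.UTF_8 is a guaranteed compile-time constant. A hedged sketch of the writer construction used above (names are placeholders):

import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

public class JsonWriterSketch {
  static Writer utf8Writer(OutputStream out) {
    // Before: new OutputStreamWriter(out, Charset.forName("UTF-8"))
    return new OutputStreamWriter(out, StandardCharsets.UTF_8);
  }
}
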
+ 2 - 2
hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java

@@ -28,6 +28,7 @@ import org.apache.hadoop.util.JsonSerialization;
 
 import java.io.EOFException;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
 /**
  * Support for marshalling objects to and from JSON.
@@ -47,7 +48,6 @@ import java.io.IOException;
 @InterfaceStability.Evolving
 public class JsonSerDeser<T> extends JsonSerialization<T> {
 
-  private static final String UTF_8 = "UTF-8";
   public static final String E_NO_DATA = "No data at path";
   public static final String E_DATA_TOO_SHORT = "Data at path too short";
   public static final String E_MISSING_MARKER_STRING =
@@ -102,7 +102,7 @@ public class JsonSerDeser<T> extends JsonSerialization<T> {
     if (StringUtils.isNotEmpty(marker) && len < marker.length()) {
       throw new NoRecordException(path, E_DATA_TOO_SHORT);
     }
-    String json = new String(bytes, 0, len, UTF_8);
+    String json = new String(bytes, 0, len, StandardCharsets.UTF_8);
     if (StringUtils.isNotEmpty(marker)
         && !json.contains(marker)) {
       throw new NoRecordException(path, E_MISSING_MARKER_STRING + marker);

+ 2 - 1
hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/RegistrySecurity.java

@@ -42,6 +42,7 @@ import org.slf4j.LoggerFactory;
 import javax.security.auth.login.AppConfigurationEntry;
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -295,7 +296,7 @@ public class RegistrySecurity extends AbstractService {
           digestAuthUser = id;
           digestAuthPassword = pass;
           String authPair = id + ":" + pass;
-          digestAuthData = authPair.getBytes("UTF-8");
+          digestAuthData = authPair.getBytes(StandardCharsets.UTF_8);
           if (LOG.isDebugEnabled()) {
             LOG.debug("Auth is Digest ACL: {}", aclToString(acl));
           }

+ 2 - 1
hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java

@@ -80,6 +80,7 @@ import java.nio.ByteBuffer;
 import java.nio.channels.DatagramChannel;
 import java.nio.channels.ServerSocketChannel;
 import java.nio.channels.SocketChannel;
+import java.nio.charset.StandardCharsets;
 import java.security.KeyFactory;
 import java.security.NoSuchAlgorithmException;
 import java.security.PrivateKey;
@@ -628,7 +629,7 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
       Name zoneName = zone.getOrigin();
       DNSKEYRecord dnskeyRecord = dnsKeyRecs.get(zoneName);
       if (dnskeyRecord == null) {
-        byte[] key = Base64.decodeBase64(publicKey.getBytes("UTF-8"));
+        byte[] key = Base64.decodeBase64(publicKey.getBytes(StandardCharsets.UTF_8));
         dnskeyRecord = new DNSKEYRecord(zoneName,
             DClass.IN, ttl,
             DNSKEYRecord.Flags.ZONE_KEY,

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java

@@ -30,6 +30,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.net.InetAddress;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -59,7 +60,6 @@ import org.apache.hadoop.security.SaslRpcServer.QualityOfProtection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.thirdparty.com.google.common.net.InetAddresses;
@@ -147,7 +147,7 @@ public final class DataTransferSaslUtil {
    * @return key encoded as SASL password
    */
   public static char[] encryptionKeyToPassword(byte[] encryptionKey) {
-    return new String(Base64.encodeBase64(encryptionKey, false), Charsets.UTF_8)
+    return new String(Base64.encodeBase64(encryptionKey, false), StandardCharsets.UTF_8)
         .toCharArray();
   }
 

+ 5 - 5
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java

@@ -30,6 +30,7 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.net.InetAddress;
 import java.net.Socket;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -65,7 +66,6 @@ import org.apache.hadoop.util.Lists;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 
 /**
  * Negotiates SASL for DataTransferProtocol on behalf of a client.  There are
@@ -347,7 +347,7 @@ public class SaslDataTransferClient {
     return encryptionKey.keyId + NAME_DELIMITER +
         encryptionKey.blockPoolId + NAME_DELIMITER +
         new String(Base64.encodeBase64(encryptionKey.nonce, false),
-            Charsets.UTF_8);
+            StandardCharsets.UTF_8);
   }
 
   /**
@@ -450,7 +450,7 @@ public class SaslDataTransferClient {
   private void updateToken(Token<BlockTokenIdentifier> accessToken,
       SecretKey secretKey, Map<String, String> saslProps)
       throws IOException {
-    byte[] newSecret = saslProps.get(Sasl.QOP).getBytes(Charsets.UTF_8);
+    byte[] newSecret = saslProps.get(Sasl.QOP).getBytes(StandardCharsets.UTF_8);
     BlockTokenIdentifier bkid = accessToken.decodeIdentifier();
     bkid.setHandshakeMsg(newSecret);
     byte[] bkidBytes = bkid.getBytes();
@@ -471,7 +471,7 @@ public class SaslDataTransferClient {
    */
   private static String buildUserName(Token<BlockTokenIdentifier> blockToken) {
     return new String(Base64.encodeBase64(blockToken.getIdentifier(), false),
-        Charsets.UTF_8);
+        StandardCharsets.UTF_8);
   }
 
   /**
@@ -483,7 +483,7 @@ public class SaslDataTransferClient {
    */
   private char[] buildClientPassword(Token<BlockTokenIdentifier> blockToken) {
     return new String(Base64.encodeBase64(blockToken.getPassword(), false),
-        Charsets.UTF_8).toCharArray();
+        StandardCharsets.UTF_8).toCharArray();
   }
 
   /**

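The SASL helpers above build user names and passwords by Base64-encoding binary token material and decoding the result as UTF-8; Base64 output is pure ASCII, so UTF-8 decoding is lossless and the charset only needs to be explicit and constant. A rough sketch of the idea, using the JDK Base64 encoder rather than the commons-codec one in the patch (an assumption made for brevity):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class SaslSecretSketch {
  // Turn arbitrary secret bytes into a char[] suitable for use as a SASL password.
  static char[] toPassword(byte[] secret) {
    byte[] ascii = Base64.getEncoder().encode(secret);            // ASCII-only output
    return new String(ascii, StandardCharsets.UTF_8).toCharArray();
  }
}
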
+ 3 - 2
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java

@@ -27,6 +27,7 @@ import java.io.File;
 import java.io.InputStreamReader;
 import java.io.IOException;
 import java.io.Reader;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.ArrayList;
@@ -84,7 +85,7 @@ public final class CombinedHostsFileReader {
     if (hostFile.length() > 0) {
       try (Reader input =
           new InputStreamReader(
-              Files.newInputStream(hostFile.toPath()), "UTF-8")) {
+              Files.newInputStream(hostFile.toPath()), StandardCharsets.UTF_8)) {
         allDNs = objectMapper.readValue(input, DatanodeAdminProperties[].class);
       } catch (JsonMappingException jme) {
         // The old format doesn't have json top-level token to enclose
@@ -103,7 +104,7 @@ public final class CombinedHostsFileReader {
       List<DatanodeAdminProperties> all = new ArrayList<>();
       try (Reader input =
           new InputStreamReader(Files.newInputStream(Paths.get(hostsFilePath)),
-                  "UTF-8")) {
+                  StandardCharsets.UTF_8)) {
         Iterator<DatanodeAdminProperties> iterator =
             objectReader.readValues(jsonFactory.createParser(input));
         while (iterator.hasNext()) {

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.hdfs.util;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.Set;
@@ -62,7 +63,7 @@ public final class CombinedHostsFileWriter {
 
     try (Writer output =
         new OutputStreamWriter(Files.newOutputStream(Paths.get(hostsFile)),
-            "UTF-8")) {
+            StandardCharsets.UTF_8)) {
       objectMapper.writeValue(output, allDNs);
     }
   }

+ 1 - 2
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java

@@ -137,7 +137,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.util.Preconditions;
 
 /** A FileSystem for HDFS over the web. */
@@ -1792,7 +1791,7 @@ public class WebHdfsFileSystem extends FileSystem
     }
     DirectoryListing listing = new FsPathResponseRunner<DirectoryListing>(
         GetOpParam.Op.LISTSTATUS_BATCH,
-        f, new StartAfterParam(new String(prevKey, Charsets.UTF_8))) {
+        f, new StartAfterParam(new String(prevKey, StandardCharsets.UTF_8))) {
       @Override
       DirectoryListing decodeResponse(Map<?, ?> json) throws IOException {
         return JsonUtilClient.toDirectoryListing(json);

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsContentLength.java

@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.ServerSocket;
 import java.net.Socket;
+import java.nio.charset.StandardCharsets;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -204,7 +205,7 @@ public class TestWebHdfsContentLength {
             if (n <= 0) {
               break;
             }
-            sb.append(new String(buf, 0, n, "UTF-8"));
+            sb.append(new String(buf, 0, n, StandardCharsets.UTF_8));
           }
           return sb.toString();
         } finally {

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.http.client;
 
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -24,7 +25,6 @@ import java.util.EnumSet;
 import java.util.List;
 
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.type.MapType;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -796,7 +796,7 @@ public class HttpFSFileSystem extends FileSystem
     Map<String, String> params = new HashMap<String, String>();
     params.put(OP_PARAM, Operation.LISTSTATUS_BATCH.toString());
     if (token != null) {
-      params.put(START_AFTER_PARAM, new String(token, Charsets.UTF_8));
+      params.put(START_AFTER_PARAM, new String(token, StandardCharsets.UTF_8));
     }
     HttpURLConnection conn = getConnection(
         Operation.LISTSTATUS_BATCH.getMethod(),
@@ -811,7 +811,7 @@ public class HttpFSFileSystem extends FileSystem
     byte[] newToken = null;
     if (statuses.length > 0) {
       newToken = statuses[statuses.length - 1].getPath().getName().toString()
-          .getBytes(Charsets.UTF_8);
+          .getBytes(StandardCharsets.UTF_8);
     }
     // Parse the remainingEntries boolean into hasMore
     final long remainingEntries = (Long) listing.get(REMAINING_ENTRIES_JSON);

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java

@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.fs.http.server;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -91,6 +90,7 @@ import javax.ws.rs.core.UriInfo;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.security.AccessControlException;
 import java.security.PrivilegedExceptionAction;
 import java.text.MessageFormat;
@@ -422,7 +422,7 @@ public class HttpFSServer {
           HttpFSParametersProvider.StartAfterParam.class);
       byte[] token = HttpFSUtils.EMPTY_BYTES;
       if (startAfter != null) {
-        token = startAfter.getBytes(Charsets.UTF_8);
+        token = startAfter.getBytes(StandardCharsets.UTF_8);
       }
       FSOperations.FSListStatusBatch command = new FSOperations
           .FSListStatusBatch(path, token);

+ 7 - 7
hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java

@@ -25,7 +25,7 @@ import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.SocketAddress;
 import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.EnumSet;
 
 import io.netty.buffer.ByteBuf;
@@ -681,15 +681,15 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
       }
       int rtmax = config.getInt(NfsConfigKeys.DFS_NFS_MAX_READ_TRANSFER_SIZE_KEY,
           NfsConfigKeys.DFS_NFS_MAX_READ_TRANSFER_SIZE_DEFAULT);
-      if (rtmax < target.getBytes(Charset.forName("UTF-8")).length) {
+      if (rtmax < target.getBytes(StandardCharsets.UTF_8).length) {
         LOG.error("Link size: {} is larger than max transfer size: {}",
-            target.getBytes(Charset.forName("UTF-8")).length, rtmax);
+            target.getBytes(StandardCharsets.UTF_8).length, rtmax);
         return new READLINK3Response(Nfs3Status.NFS3ERR_IO, postOpAttr,
             new byte[0]);
       }
 
       return new READLINK3Response(Nfs3Status.NFS3_OK, postOpAttr,
-          target.getBytes(Charset.forName("UTF-8")));
+          target.getBytes(StandardCharsets.UTF_8));
 
     } catch (IOException e) {
       LOG.warn("Readlink error", e);
@@ -1515,7 +1515,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
       }
       // This happens when startAfter was just deleted
       LOG.info("Cookie couldn't be found: {}, do listing from beginning",
-          new String(startAfter, Charset.forName("UTF-8")));
+          new String(startAfter, StandardCharsets.UTF_8));
       dlisting = dfsClient
           .listPaths(dirFileIdPath, HdfsFileStatus.EMPTY_NAME);
     }
@@ -1628,7 +1628,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
         startAfter = HdfsFileStatus.EMPTY_NAME;
       } else {
         String inodeIdPath = Nfs3Utils.getFileIdPath(cookie);
-        startAfter = inodeIdPath.getBytes(Charset.forName("UTF-8"));
+        startAfter = inodeIdPath.getBytes(StandardCharsets.UTF_8);
       }
 
       dlisting = listPaths(dfsClient, dirFileIdPath, startAfter);
@@ -1800,7 +1800,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
         startAfter = HdfsFileStatus.EMPTY_NAME;
       } else {
         String inodeIdPath = Nfs3Utils.getFileIdPath(cookie);
-        startAfter = inodeIdPath.getBytes(Charset.forName("UTF-8"));
+        startAfter = inodeIdPath.getBytes(StandardCharsets.UTF_8);
       }
 
       dlisting = listPaths(dfsClient, dirFileIdPath, startAfter);

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java

@@ -28,6 +28,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
 
@@ -62,7 +63,6 @@ import org.apache.hadoop.util.Lists;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 
 /**
  * Negotiates SASL for DataTransferProtocol on behalf of a server.  There are
@@ -326,7 +326,7 @@ public class SaslDataTransferServer {
     byte[] tokenPassword = blockPoolTokenSecretManager.retrievePassword(
       identifier);
     return (new String(Base64.encodeBase64(tokenPassword, false),
-      Charsets.UTF_8)).toCharArray();
+      StandardCharsets.UTF_8)).toCharArray();
   }
 
   /**
@@ -381,7 +381,7 @@ public class SaslDataTransferServer {
       if (secret != null || bpid != null) {
         // sanity check, if one is null, the other must also not be null
         assert(secret != null && bpid != null);
-        String qop = new String(secret, Charsets.UTF_8);
+        String qop = new String(secret, StandardCharsets.UTF_8);
         saslProps.put(Sasl.QOP, qop);
       }
       SaslParticipant sasl = SaslParticipant.createServerSaslParticipant(

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java

@@ -25,6 +25,7 @@ import java.io.InputStream;
 import java.io.OutputStreamWriter;
 import java.net.URL;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.StandardCopyOption;
 import java.security.PrivilegedExceptionAction;
@@ -72,7 +73,6 @@ import org.apache.hadoop.util.StopWatch;
 import org.apache.hadoop.util.Time;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
 import org.apache.hadoop.thirdparty.protobuf.TextFormat;
@@ -1105,7 +1105,7 @@ public class Journal implements Closeable {
       // Write human-readable data after the protobuf. This is only
       // to assist in debugging -- it's not parsed at all.
       try(OutputStreamWriter writer =
-          new OutputStreamWriter(fos, Charsets.UTF_8)) {
+          new OutputStreamWriter(fos, StandardCharsets.UTF_8)) {
         writer.write(String.valueOf(newData));
         writer.write('\n');
         writer.flush();

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java

@@ -18,10 +18,10 @@
 
 package org.apache.hadoop.hdfs.security.token.block;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.SecureRandom;
 import java.util.Arrays;
@@ -293,7 +293,7 @@ public class BlockTokenSecretManager extends
     if (shouldWrapQOP) {
       String qop = Server.getAuxiliaryPortEstablishedQOP();
       if (qop != null) {
-        id.setHandshakeMsg(qop.getBytes(Charsets.UTF_8));
+        id.setHandshakeMsg(qop.getBytes(StandardCharsets.UTF_8));
       }
     }
     return new Token<BlockTokenIdentifier>(id, this);

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java

@@ -25,6 +25,7 @@ import java.io.RandomAccessFile;
 import java.lang.management.ManagementFactory;
 import java.nio.channels.FileLock;
 import java.nio.channels.OverlappingFileLockException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.attribute.PosixFilePermission;
@@ -53,7 +54,6 @@ import org.apache.hadoop.io.nativeio.NativeIOException;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.util.VersionInfo;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -944,7 +944,7 @@ public abstract class Storage extends StorageInfo {
           LOG.error("Unable to acquire file lock on path {}", lockF);
           throw new OverlappingFileLockException();
         }
-        file.write(jvmName.getBytes(Charsets.UTF_8));
+        file.write(jvmName.getBytes(StandardCharsets.UTF_8));
         LOG.info("Lock on {} acquired by nodename {}", lockF, jvmName);
       } catch(OverlappingFileLockException oe) {
         // Cannot read from the locked file on Windows.

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java

@@ -42,7 +42,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -450,7 +450,7 @@ public class DiskBalancer {
 
     if ((planID == null) ||
         (planID.length() != sha1Length) ||
-        !DigestUtils.sha1Hex(plan.getBytes(Charset.forName("UTF-8")))
+        !DigestUtils.sha1Hex(plan.getBytes(StandardCharsets.UTF_8))
             .equalsIgnoreCase(planID)) {
       LOG.error("Disk Balancer - Invalid plan hash.");
       throw new DiskBalancerException("Invalid or mis-matched hash.",

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/PmemVolumeManager.java

@@ -35,6 +35,7 @@ import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.nio.MappedByteBuffer;
 import java.nio.channels.FileChannel;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
@@ -333,7 +334,7 @@ public final class PmemVolumeManager {
 
     String uuidStr = UUID.randomUUID().toString();
     String testFilePath = realPmemDir.getPath() + "/.verify.pmem." + uuidStr;
-    byte[] contents = uuidStr.getBytes("UTF-8");
+    byte[] contents = uuidStr.getBytes(StandardCharsets.UTF_8);
     RandomAccessFile testFile = null;
     MappedByteBuffer out = null;
     try {

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ExceptionHandler.java

@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.server.datanode.web.webhdfs;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import com.sun.jersey.api.ParamException;
 import com.sun.jersey.api.container.ContainerException;
 import io.netty.buffer.Unpooled;
@@ -32,6 +31,7 @@ import org.apache.hadoop.security.token.SecretManager;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
 import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH;
 import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_TYPE;
@@ -83,7 +83,7 @@ class ExceptionHandler {
       s = INTERNAL_SERVER_ERROR;
     }
 
-    final byte[] js = JsonUtil.toJsonString(e).getBytes(Charsets.UTF_8);
+    final byte[] js = JsonUtil.toJsonString(e).getBytes(StandardCharsets.UTF_8);
     DefaultFullHttpResponse resp =
       new DefaultFullHttpResponse(HTTP_1_1, s, Unpooled.wrappedBuffer(js));
 

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

@@ -96,6 +96,7 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SNAPSHOT_DIFF_LI
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SNAPSHOT_DIFF_LISTING_LIMIT_DEFAULT;
 import static org.apache.hadoop.hdfs.DFSUtil.isParentEntry;
 
+import java.nio.charset.StandardCharsets;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.text.CaseUtils;
@@ -343,7 +344,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -1982,7 +1982,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
         File file = new File(System.getProperty("hadoop.log.dir"), filename);
         PrintWriter out = new PrintWriter(new BufferedWriter(
                 new OutputStreamWriter(Files.newOutputStream(file.toPath()),
-                        Charsets.UTF_8)));
+                        StandardCharsets.UTF_8)));
         metaSave(out);
         out.flush();
         out.close();
@@ -4217,7 +4217,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
   public byte[] getSrcPathsHash(String[] srcs) {
     synchronized (digest) {
       for (String src : srcs) {
-        digest.update(src.getBytes(Charsets.UTF_8));
+        digest.update(src.getBytes(StandardCharsets.UTF_8));
       }
       byte[] result = digest.digest();
       digest.reset();

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java

@@ -26,6 +26,7 @@ import java.net.InetAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
 import java.security.Principal;
 import java.security.PrivilegedExceptionAction;
 import java.util.Base64;
@@ -124,7 +125,6 @@ import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.StringUtils;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import com.sun.jersey.spi.container.ResourceFilters;
 
 /** Web-hdfs NameNode implementation. */
@@ -1339,7 +1339,7 @@ public class NamenodeWebHdfsMethods {
     {
       byte[] start = HdfsFileStatus.EMPTY_NAME;
       if (startAfter != null && startAfter.getValue() != null) {
-        start = startAfter.getValue().getBytes(Charsets.UTF_8);
+        start = startAfter.getValue().getBytes(StandardCharsets.UTF_8);
       }
       final DirectoryListing listing = getDirectoryListing(cp, fullpath, start);
       final String js = JsonUtil.toJsonString(listing);
@@ -1532,7 +1532,7 @@ public class NamenodeWebHdfsMethods {
       @Override
       public void write(final OutputStream outstream) throws IOException {
         final PrintWriter out = new PrintWriter(new OutputStreamWriter(
-            outstream, Charsets.UTF_8));
+            outstream, StandardCharsets.UTF_8));
         out.println("{\"" + FileStatus.class.getSimpleName() + "es\":{\""
             + FileStatus.class.getSimpleName() + "\":[");
 

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java

@@ -22,6 +22,7 @@ import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.Stack;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -41,7 +42,6 @@ import org.xml.sax.XMLReader;
 import org.xml.sax.helpers.DefaultHandler;
 import org.xml.sax.helpers.XMLReaderFactory;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 
 /**
  * OfflineEditsXmlLoader walks an EditsVisitor over an OEV XML file
@@ -75,7 +75,7 @@ class OfflineEditsXmlLoader
         File inputFile, OfflineEditsViewer.Flags flags) throws FileNotFoundException {
     this.visitor = visitor;
     this.fileReader =
-        new InputStreamReader(new FileInputStream(inputFile), Charsets.UTF_8);
+        new InputStreamReader(new FileInputStream(inputFile), StandardCharsets.UTF_8);
     this.fixTxIds = flags.getFixTxIds();
   }
 

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java

@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.HashMap;
 
@@ -30,7 +31,6 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 
 /**
  * StatisticsEditsVisitor implements text version of EditsVisitor
@@ -53,7 +53,7 @@ public class StatisticsEditsVisitor implements OfflineEditsVisitor {
    * @param out Name of file to write output to
    */
   public StatisticsEditsVisitor(OutputStream out) throws IOException {
-    this.out = new PrintWriter(new OutputStreamWriter(out, Charsets.UTF_8));
+    this.out = new PrintWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8));
   }
 
   /** Start the visitor */

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java

@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.tools.offlineImageViewer;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import io.netty.buffer.ByteBuf;
 import io.netty.buffer.Unpooled;
 import io.netty.channel.ChannelFutureListener;
@@ -37,6 +36,7 @@ import org.apache.hadoop.util.StringUtils;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
 
@@ -124,7 +124,7 @@ class FSImageHandler extends SimpleChannelInboundHandler<HttpRequest> {
 
     DefaultFullHttpResponse resp = new DefaultFullHttpResponse(HTTP_1_1,
         HttpResponseStatus.OK, Unpooled.wrappedBuffer(content
-            .getBytes(Charsets.UTF_8)));
+            .getBytes(StandardCharsets.UTF_8)));
     resp.headers().set(CONTENT_TYPE, APPLICATION_JSON_UTF8);
     resp.headers().set(CONTENT_LENGTH, resp.content().readableBytes());
     resp.headers().set(CONNECTION, CLOSE);
@@ -142,7 +142,7 @@ class FSImageHandler extends SimpleChannelInboundHandler<HttpRequest> {
     Exception e = cause instanceof Exception ? (Exception) cause : new
         Exception(cause);
     final String output = JsonUtil.toJsonString(e);
-    ByteBuf content = Unpooled.wrappedBuffer(output.getBytes(Charsets.UTF_8));
+    ByteBuf content = Unpooled.wrappedBuffer(output.getBytes(StandardCharsets.UTF_8));
     final DefaultFullHttpResponse resp = new DefaultFullHttpResponse(
             HTTP_1_1, INTERNAL_SERVER_ERROR, content);
 

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.tools.offlineImageViewer;
 
 import java.io.DataInputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Date;
@@ -320,7 +321,7 @@ class ImageLoaderCurrent implements ImageLoader {
     for(int i = 0; i < numINUC; i++) {
       v.visitEnclosingElement(ImageElement.INODE_UNDER_CONSTRUCTION);
       byte [] name = FSImageSerialization.readBytes(in);
-      String n = new String(name, "UTF8");
+      String n = new String(name, StandardCharsets.UTF_8);
       v.visit(ImageElement.INODE_PATH, n);
       
       if (NameNodeLayoutVersion.supports(Feature.ADD_INODE_ID, imageVersion)) {

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java

@@ -36,7 +36,7 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.security.DigestOutputStream;
@@ -1840,7 +1840,7 @@ class OfflineImageReconstructor {
       Files.deleteIfExists(Paths.get(outputPath));
       fout = Files.newOutputStream(Paths.get(outputPath));
       fis = Files.newInputStream(Paths.get(inputPath));
-      reader = new InputStreamReader(fis, Charset.forName("UTF-8"));
+      reader = new InputStreamReader(fis, StandardCharsets.UTF_8);
       out = new CountingOutputStream(
           new DigestOutputStream(
               new BufferedOutputStream(fout), digester));

+ 4 - 9
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageTextWriter.java

@@ -26,11 +26,11 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
 import java.io.RandomAccessFile;
-import java.io.UnsupportedEncodingException;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -419,9 +419,8 @@ abstract class PBImageTextWriter implements Closeable {
       return ByteBuffer.allocate(8).putLong(value).array();
     }
 
-    private static byte[] toBytes(String value)
-        throws UnsupportedEncodingException {
-      return value.getBytes("UTF-8");
+    private static byte[] toBytes(String value) {
+      return value.getBytes(StandardCharsets.UTF_8);
     }
 
     private static long toLong(byte[] bytes) {
@@ -430,11 +429,7 @@ abstract class PBImageTextWriter implements Closeable {
     }
 
     private static String toString(byte[] bytes) throws IOException {
-      try {
-        return new String(bytes, "UTF-8");
-      } catch (UnsupportedEncodingException e) {
-        throw new IOException(e);
-      }
+      return new String(bytes, StandardCharsets.UTF_8);
     }
 
     @Override

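Because new String(byte[], Charset) cannot throw UnsupportedEncodingException, the try/catch wrapper removed above was dead weight; both helpers collapse to one line each. A sketch of the simplified pair (mirroring the hunk; the class name is invented):

import java.nio.charset.StandardCharsets;

final class BytesCodecSketch {
  private static byte[] toBytes(String value) {
    return value.getBytes(StandardCharsets.UTF_8);    // no checked exception to declare
  }

  private static String toString(byte[] bytes) {
    return new String(bytes, StandardCharsets.UTF_8); // no try/catch needed either
  }
}
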
+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java

@@ -19,10 +19,10 @@ package org.apache.hadoop.hdfs.tools.offlineImageViewer;
 
 import java.io.IOException;
 import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 
 /**
  * TextWriterImageProcessor mixes in the ability for ImageVisitor
@@ -61,7 +61,7 @@ abstract class TextWriterImageVisitor extends ImageVisitor {
     super();
     this.printToScreen = printToScreen;
     fw = new OutputStreamWriter(Files.newOutputStream(Paths.get(filename)),
-        Charsets.UTF_8);
+        StandardCharsets.UTF_8);
     okToWrite = true;
   }
   

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/MD5FileUtils.java

@@ -23,6 +23,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.security.DigestInputStream;
 import java.security.MessageDigest;
@@ -35,7 +36,6 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.util.StringUtils;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 
 /**
  * Static functions for dealing with files of the same format
@@ -75,7 +75,7 @@ public abstract class MD5FileUtils {
   private static Matcher readStoredMd5(File md5File) throws IOException {
     BufferedReader reader =
         new BufferedReader(new InputStreamReader(
-            Files.newInputStream(md5File.toPath()), Charsets.UTF_8));
+            Files.newInputStream(md5File.toPath()), StandardCharsets.UTF_8));
     String md5Line;
     try {
       md5Line = reader.readLine();
@@ -155,7 +155,7 @@ public abstract class MD5FileUtils {
     String md5Line = digestString + " *" + dataFile.getName() + "\n";
 
     AtomicFileOutputStream afos = new AtomicFileOutputStream(md5File);
-    afos.write(md5Line.getBytes(Charsets.UTF_8));
+    afos.write(md5Line.getBytes(StandardCharsets.UTF_8));
     afos.close();
 
     if (LOG.isDebugEnabled()) {

+ 3 - 4
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java

@@ -22,14 +22,13 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.IOUtils;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
-
 /**
  * Class that represents a file on disk which persistently stores
  * a single <code>long</code> value. The file is updated atomically
@@ -77,7 +76,7 @@ public class PersistentLongFile {
   public static void writeFile(File file, long val) throws IOException {
     AtomicFileOutputStream fos = new AtomicFileOutputStream(file);
     try {
-      fos.write(String.valueOf(val).getBytes(Charsets.UTF_8));
+      fos.write(String.valueOf(val).getBytes(StandardCharsets.UTF_8));
       fos.write('\n');
       fos.close();
       fos = null;
@@ -93,7 +92,7 @@ public class PersistentLongFile {
     if (file.exists()) {
       BufferedReader br = 
           new BufferedReader(new InputStreamReader(new FileInputStream(
-              file), Charsets.UTF_8));
+              file), StandardCharsets.UTF_8));
       try {
         val = Long.parseLong(br.readLine());
         br.close();

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java

@@ -53,6 +53,7 @@ import java.net.URI;
 import java.net.URL;
 import java.net.URLConnection;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.security.NoSuchAlgorithmException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
@@ -70,7 +71,6 @@ import java.util.Set;
 import java.util.UUID;
 import java.util.concurrent.TimeoutException;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
@@ -985,7 +985,7 @@ public class DFSTestUtil {
    * @return url content as string (UTF-8 encoding assumed)
    */
   public static String urlGet(URL url) throws IOException {
-    return new String(urlGetBytes(url), Charsets.UTF_8);
+    return new String(urlGetBytes(url), StandardCharsets.UTF_8);
   }
   
   /**
@@ -1438,7 +1438,7 @@ public class DFSTestUtil {
     Short permission = 0777;
     filesystem.setPermission(pathFileCreate, new FsPermission(permission));
     // OP_SET_OWNER 8
-    filesystem.setOwner(pathFileCreate, new String("newOwner"), null);
+    filesystem.setOwner(pathFileCreate, "newOwner", null);
     // OP_CLOSE 9 see above
     // OP_SET_GENSTAMP 10 see above
     // OP_SET_NS_QUOTA 11 obsolete

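Alongside the charset cleanup, the commit drops redundant new String("literal") wrappers, as in the setOwner call above: a string literal is already a String (and interned), so wrapping it only allocates a needless copy. Illustrative before/after:

public class LiteralSketch {
  static void demo() {
    String wrapped = new String("testdata"); // extra allocation, same value
    String plain = "testdata";               // preferred: use the literal directly
    assert wrapped.equals(plain);
  }
}
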
+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBalancerBandwidth.java

@@ -23,6 +23,7 @@ import static org.junit.Assert.assertTrue;
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.concurrent.TimeoutException;
 
@@ -46,7 +47,7 @@ public class TestBalancerBandwidth {
   final static private int DEFAULT_BANDWIDTH = 1024*1024;
   public static final Logger LOG =
       LoggerFactory.getLogger(TestBalancerBandwidth.class);
-  private static final Charset UTF8 = Charset.forName("UTF-8");
+  private static final Charset UTF8 = StandardCharsets.UTF_8;
   private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
   private final PrintStream outStream = new PrintStream(outContent);
 

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java

@@ -23,6 +23,7 @@ import static org.junit.Assert.*;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.List;
 
@@ -41,7 +42,6 @@ import org.apache.hadoop.util.StringUtils;
 import org.junit.After;
 import org.junit.Test;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 
 /**
 * This test ensures the appropriate response (successful or failure) from
@@ -312,8 +312,8 @@ public class TestDFSRollback {
       for (File f : baseDirs) { 
         UpgradeUtilities.corruptFile(
             new File(f,"VERSION"),
-            "layoutVersion".getBytes(Charsets.UTF_8),
-            "xxxxxxxxxxxxx".getBytes(Charsets.UTF_8));
+            "layoutVersion".getBytes(StandardCharsets.UTF_8),
+            "xxxxxxxxxxxxx".getBytes(StandardCharsets.UTF_8));
       }
       startNameNodeShouldFail("file VERSION has layoutVersion missing");
 

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java

@@ -29,6 +29,7 @@ import static org.junit.Assert.fail;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.regex.Pattern;
 
 import org.slf4j.Logger;
@@ -49,7 +50,6 @@ import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
 
 /**
@@ -335,8 +335,8 @@ public class TestDFSUpgrade {
       for (File f : baseDirs) { 
         UpgradeUtilities.corruptFile(
             new File(f,"VERSION"),
-            "layoutVersion".getBytes(Charsets.UTF_8),
-            "xxxxxxxxxxxxx".getBytes(Charsets.UTF_8));
+            "layoutVersion".getBytes(StandardCharsets.UTF_8),
+            "xxxxxxxxxxxxx".getBytes(StandardCharsets.UTF_8));
       }
       startNameNodeShouldFail(StartupOption.UPGRADE);
       UpgradeUtilities.createEmptyDirs(nameNodeDirs);

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeReport.java

@@ -160,7 +160,7 @@ public class TestDatanodeReport {
       cluster.waitActive();
       DistributedFileSystem fs = cluster.getFileSystem();
       Path p = new Path("/testDatanodeReportMissingBlock");
-      DFSTestUtil.writeFile(fs, p, new String("testdata"));
+      DFSTestUtil.writeFile(fs, p, "testdata");
       LocatedBlock lb = fs.getClient().getLocatedBlocks(p.toString(), 0).get(0);
       assertEquals(3, lb.getLocations().length);
       ExtendedBlock b = lb.getBlock();

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultipleNNPortQOP.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs;
 
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -287,7 +288,7 @@ public class TestMultipleNNPortQOP extends SaslDataTransferTestCase {
   private void doTest(FileSystem fs, Path path) throws Exception {
     FileSystemTestHelper.createFile(fs, path, NUM_BLOCKS, BLOCK_SIZE);
     assertArrayEquals(FileSystemTestHelper.getFileData(NUM_BLOCKS, BLOCK_SIZE),
-        DFSTestUtil.readFile(fs, path).getBytes("UTF-8"));
+        DFSTestUtil.readFile(fs, path).getBytes(StandardCharsets.UTF_8));
     BlockLocation[] blockLocations = fs.getFileBlockLocations(path, 0,
         Long.MAX_VALUE);
     assertNotNull(blockLocations);

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java

@@ -32,6 +32,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.List;
 import java.util.Scanner;
@@ -66,7 +67,6 @@ import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.junit.rules.Timeout;
 import org.slf4j.Logger;
 import org.slf4j.event.Level;
@@ -1216,7 +1216,7 @@ public class TestQuota {
       String[] args =
           { "-setSpaceQuota", "100", "-storageType", "COLD", "/testDir" };
       admin.run(args);
-      String errOutput = new String(err.toByteArray(), Charsets.UTF_8);
+      String errOutput = new String(err.toByteArray(), StandardCharsets.UTF_8);
       assertTrue(
           errOutput.contains(StorageType.getTypesSupportingQuota().toString()));
     } finally {

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestSaslDataTransfer.java

@@ -32,6 +32,7 @@ import java.net.InetAddress;
 import java.net.ServerSocket;
 import java.net.Socket;
 import java.net.SocketTimeoutException;
+import java.nio.charset.StandardCharsets;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.slf4j.LoggerFactory;
@@ -200,7 +201,7 @@ public class TestSaslDataTransfer extends SaslDataTransferTestCase {
     fs = FileSystem.get(cluster.getURI(), conf);
     FileSystemTestHelper.createFile(fs, PATH, NUM_BLOCKS, BLOCK_SIZE);
     assertArrayEquals(FileSystemTestHelper.getFileData(NUM_BLOCKS, BLOCK_SIZE),
-      DFSTestUtil.readFile(fs, PATH).getBytes("UTF-8"));
+      DFSTestUtil.readFile(fs, PATH).getBytes(StandardCharsets.UTF_8));
     BlockLocation[] blockLocations = fs.getFileBlockLocations(PATH, 0,
       Long.MAX_VALUE);
     assertNotNull(blockLocations);

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java

@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.qjournal.server;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Bytes;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Ints;
 import org.apache.hadoop.conf.Configuration;
@@ -54,6 +53,7 @@ import java.io.File;
 import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
@@ -278,7 +278,7 @@ public class TestJournalNode {
     ch.newEpoch(1).get();
     ch.setEpoch(1);
     ch.startLogSegment(1, NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION).get();
-    ch.sendEdits(1L, 1, 1, "hello".getBytes(Charsets.UTF_8)).get();
+    ch.sendEdits(1L, 1, 1, "hello".getBytes(StandardCharsets.UTF_8)).get();
     
     metrics = MetricsAsserts.getMetrics(
         journal.getMetrics().getName());
@@ -291,7 +291,7 @@ public class TestJournalNode {
     beginTimestamp = lastJournalTimestamp;
 
     ch.setCommittedTxId(100L);
-    ch.sendEdits(1L, 2, 1, "goodbye".getBytes(Charsets.UTF_8)).get();
+    ch.sendEdits(1L, 2, 1, "goodbye".getBytes(StandardCharsets.UTF_8)).get();
 
     metrics = MetricsAsserts.getMetrics(
         journal.getMetrics().getName());

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMetrics.java

@@ -553,7 +553,7 @@ public class TestDataNodeMetrics {
       cluster.waitActive();
       DistributedFileSystem fs = cluster.getFileSystem();
       Path p = new Path("/testShouldThrowTMP");
-      DFSTestUtil.writeFile(fs, p, new String("testdata"));
+      DFSTestUtil.writeFile(fs, p, "testdata");
       //Before DN throws too many open files
       verifyBlockLocations(fs, p, 1);
       Mockito.doThrow(new FileNotFoundException("Too many open files")).

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFavoredNodesEndToEnd.java

@@ -60,7 +60,7 @@ public class TestFavoredNodesEndToEnd {
   private static Configuration conf;
   private final static int NUM_DATA_NODES = 10;
   private final static int NUM_FILES = 10;
-  private final static byte[] SOME_BYTES = new String("foo").getBytes();
+  private final static byte[] SOME_BYTES = "foo".getBytes();
   private static DistributedFileSystem dfs;
   private static ArrayList<DataNode> datanodes;
   

+ 11 - 11
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java

@@ -919,7 +919,7 @@ public class TestFsck {
     dfs = cluster.getFileSystem();
 
     // create files
-    final String testFile = new String("/testfile");
+    final String testFile = "/testfile";
     final Path path = new Path(testFile);
     DFSTestUtil.createFile(dfs, path, fileSize, replFactor, 1000L);
     DFSTestUtil.waitReplication(dfs, path, replFactor);
@@ -1202,7 +1202,7 @@ public class TestFsck {
     assertNotNull("Failed to get FileSystem", dfs);
 
     // Create a file that will be intentionally under-replicated
-    final String pathString = new String("/testfile");
+    final String pathString = "/testfile";
     final Path path = new Path(pathString);
     long fileLen = blockSize * numBlocks;
     DFSTestUtil.createFile(dfs, path, fileLen, replFactor, 1);
@@ -1263,7 +1263,7 @@ public class TestFsck {
     assertNotNull("Failed to get FileSystem", dfs);
 
     // Create a file that will be intentionally under-replicated
-    final String pathString = new String("/testfile");
+    final String pathString = "/testfile";
     final Path path = new Path(pathString);
     long fileLen = blockSize * numBlocks;
     DFSTestUtil.createFile(dfs, path, fileLen, replFactor, 1);
@@ -1436,7 +1436,7 @@ public class TestFsck {
     DFSTestUtil util = new DFSTestUtil.Builder().
         setName(getClass().getSimpleName()).setNumFiles(1).build();
     //create files
-    final String pathString = new String("/testfile");
+    final String pathString = "/testfile";
     final Path path = new Path(pathString);
     util.createFile(dfs, path, 1024, replFactor, 1000L);
     util.waitReplication(dfs, path, replFactor);
@@ -1490,7 +1490,7 @@ public class TestFsck {
     DFSTestUtil util = new DFSTestUtil.Builder().
         setName(getClass().getSimpleName()).setNumFiles(1).build();
     //create files
-    final String pathString = new String("/testfile");
+    final String pathString = "/testfile";
     final Path path = new Path(pathString);
     util.createFile(dfs, path, 1024, replFactor, 1000L);
     util.waitReplication(dfs, path, replFactor);
@@ -1577,7 +1577,7 @@ public class TestFsck {
     DFSTestUtil util = new DFSTestUtil.Builder().
         setName(getClass().getSimpleName()).setNumFiles(1).build();
     //create files
-    final String pathString = new String("/testfile");
+    final String pathString = "/testfile";
     final Path path = new Path(pathString);
     util.createFile(dfs, path, 1024, replFactor, 1000L);
     util.waitReplication(dfs, path, replFactor);
@@ -1694,7 +1694,7 @@ public class TestFsck {
           setName(getClass().getSimpleName()).setNumFiles(1).build();
 
       // Create one file.
-      final String pathString = new String("/testfile");
+      final String pathString = "/testfile";
       final Path path = new Path(pathString);
       util.createFile(fs, path, 1024L, replFactor, 1024L);
       util.waitReplication(fs, path, replFactor);
@@ -1780,7 +1780,7 @@ public class TestFsck {
     DFSTestUtil util = new DFSTestUtil.Builder().
         setName(getClass().getSimpleName()).setNumFiles(1).build();
     //create files
-    final String pathString = new String("/testfile");
+    final String pathString = "/testfile";
     final Path path = new Path(pathString);
     util.createFile(dfs, path, 1024, repFactor, 1000L);
     util.waitReplication(dfs, path, repFactor);
@@ -1937,7 +1937,7 @@ public class TestFsck {
         setName(getClass().getSimpleName()).setNumFiles(1).build();
 
     //create files
-    final String testFile = new String("/testfile");
+    final String testFile = "/testfile";
     final Path path = new Path(testFile);
     util.createFile(dfs, path, fileSize, replFactor, 1000L);
     util.waitReplication(dfs, path, replFactor);
@@ -2020,7 +2020,7 @@ public class TestFsck {
     DFSTestUtil util = new DFSTestUtil.Builder().
         setName(getClass().getSimpleName()).setNumFiles(1).build();
     //create files
-    final String testFile = new String("/testfile");
+    final String testFile = "/testfile";
     final Path path = new Path(testFile);
     util.createFile(dfs, path, 1024, replFactor, 1000L);
     util.waitReplication(dfs, path, replFactor);
@@ -2394,7 +2394,7 @@ public class TestFsck {
     }
 
     // create files
-    final String testFile = new String("/testfile");
+    final String testFile = "/testfile";
     final Path path = new Path(testFile);
     DFSTestUtil.createFile(dfs, path, fileSize, replFactor, 1000L);
     DFSTestUtil.waitReplication(dfs, path, replFactor);

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestINodeFile.java

@@ -1163,7 +1163,7 @@ public class TestINodeFile {
           HdfsFileStatus.EMPTY_NAME, false);
       assertTrue(dl.getPartialListing().length == 3);
 
-      String f2 = new String("f2");
+      String f2 = "f2";
       dl = cluster.getNameNodeRpc().getListing("/tmp", f2.getBytes(), false);
       assertTrue(dl.getPartialListing().length == 1);
 

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java

@@ -24,6 +24,7 @@ import static org.mockito.Mockito.*;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Map;
@@ -245,7 +246,7 @@ public class TestStartupProgressServlet {
    */
   private String doGetAndReturnResponseBody() throws IOException {
     servlet.doGet(req, resp);
-    return new String(respOut.toByteArray(), "UTF-8");
+    return new String(respOut.toByteArray(), StandardCharsets.UTF_8);
   }
 
   /**

+ 5 - 5
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java

@@ -19,8 +19,8 @@ package org.apache.hadoop.hdfs.tools;
 
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -56,8 +56,8 @@ public class TestDFSAdminWithHA {
   private static String newLine = System.getProperty("line.separator");
 
   private void assertOutputMatches(String string) {
-    String errOutput = new String(err.toByteArray(), Charsets.UTF_8);
-    String output = new String(out.toByteArray(), Charsets.UTF_8);
+    String errOutput = new String(err.toByteArray(), StandardCharsets.UTF_8);
+    String output = new String(out.toByteArray(), StandardCharsets.UTF_8);
 
     if (!errOutput.matches(string) && !output.matches(string)) {
       fail("Expected output to match '" + string +
@@ -70,8 +70,8 @@ public class TestDFSAdminWithHA {
   }
 
   private void assertOutputMatches(String outMessage, String errMessage) {
-    String errOutput = new String(err.toByteArray(), Charsets.UTF_8);
-    String output = new String(out.toByteArray(), Charsets.UTF_8);
+    String errOutput = new String(err.toByteArray(), StandardCharsets.UTF_8);
+    String output = new String(out.toByteArray(), StandardCharsets.UTF_8);
 
     if (!errOutput.matches(errMessage) || !output.matches(outMessage)) {
       fail("Expected output to match '" + outMessage + " and " + errMessage +

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdmin.java

@@ -26,6 +26,7 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -48,7 +49,6 @@ import org.junit.Test;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
 
 public class TestDFSHAAdmin {
@@ -435,8 +435,8 @@ public class TestDFSHAAdmin {
     outBytes.reset();
     LOG.info("Running: DFSHAAdmin " + Joiner.on(" ").join(args));
     int ret = tool.run(args);
-    errOutput = new String(errOutBytes.toByteArray(), Charsets.UTF_8);
-    output = new String(outBytes.toByteArray(), Charsets.UTF_8);
+    errOutput = new String(errOutBytes.toByteArray(), StandardCharsets.UTF_8);
+    output = new String(outBytes.toByteArray(), StandardCharsets.UTF_8);
     LOG.info("Err_output:\n" + errOutput + "\nOutput:\n" + output);
     return ret;
   }

+ 4 - 4
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java

@@ -27,6 +27,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -45,7 +46,6 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
 import org.apache.hadoop.thirdparty.com.google.common.io.Files;
 
@@ -232,7 +232,7 @@ public class TestDFSHAAdminMiniCluster {
     assertEquals(0, runTool("-ns", "minidfs-ns", "-failover", "nn2", "nn1"));
 
     // Fencer has not run yet, since none of the above required fencing 
-    assertEquals("", Files.asCharSource(tmpFile, Charsets.UTF_8).read());
+    assertEquals("", Files.asCharSource(tmpFile, StandardCharsets.UTF_8).read());
 
     // Test failover with fencer and forcefence option
     assertEquals(0, runTool("-failover", "nn1", "nn2", "--forcefence"));
@@ -240,7 +240,7 @@ public class TestDFSHAAdminMiniCluster {
     // The fence script should run with the configuration from the target
     // node, rather than the configuration from the fencing node. Strip
     // out any trailing spaces and CR/LFs which may be present on Windows.
-    String fenceCommandOutput = Files.asCharSource(tmpFile, Charsets.UTF_8)
+    String fenceCommandOutput = Files.asCharSource(tmpFile, StandardCharsets.UTF_8)
         .read().replaceAll(" *[\r\n]+", "");
     assertEquals("minidfs-ns.nn1 " + nn1Port + " nn1", fenceCommandOutput);
     tmpFile.delete();
@@ -325,7 +325,7 @@ public class TestDFSHAAdminMiniCluster {
     errOutBytes.reset();
     LOG.info("Running: DFSHAAdmin " + Joiner.on(" ").join(args));
     int ret = tool.run(args);
-    errOutput = new String(errOutBytes.toByteArray(), Charsets.UTF_8);
+    errOutput = new String(errOutBytes.toByteArray(), StandardCharsets.UTF_8);
     LOG.info("Output:\n" + errOutput);
     return ret;
   }

+ 3 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java

@@ -27,6 +27,7 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.text.MessageFormat;
 import java.util.Arrays;
 import java.util.Map;
@@ -341,7 +342,7 @@ public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest {
       byte[] respBody = new byte[content.length()];
       is = conn.getInputStream();
       IOUtils.readFully(is, respBody, 0, content.length());
-      assertEquals(content, new String(respBody, "US-ASCII"));
+      assertEquals(content, new String(respBody, StandardCharsets.US_ASCII));
     } finally {
       IOUtils.closeStream(is);
       if (conn != null) {
@@ -392,7 +393,7 @@ public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest {
       byte[] respBody = new byte[content.length() - 1];
       is = conn.getInputStream();
       IOUtils.readFully(is, respBody, 0, content.length() - 1);
-      assertEquals(content.substring(1), new String(respBody, "US-ASCII"));
+      assertEquals(content.substring(1), new String(respBody, StandardCharsets.US_ASCII));
     } finally {
       IOUtils.closeStream(is);
       if (conn != null) {

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java

@@ -31,6 +31,7 @@ import java.net.ServerSocket;
 import java.net.Socket;
 import java.net.SocketTimeoutException;
 import java.nio.channels.SocketChannel;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -332,7 +333,7 @@ public class TestWebHdfsTimeouts {
 
           // Write response.
           out = clientSocket.getOutputStream();
-          out.write(temporaryRedirect().getBytes("UTF-8"));
+          out.write(temporaryRedirect().getBytes(StandardCharsets.UTF_8));
         } catch (IOException e) {
           // Fail the test on any I/O error in the server thread.
           LOG.error("unexpected IOException in server thread", e);

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestHdfsConfigFields.java

@@ -39,7 +39,7 @@ public class TestHdfsConfigFields extends TestConfigurationFieldsBase {
 
   @Override
   public void initializeMemberVariables() {
-    xmlFilename = new String("hdfs-default.xml");
+    xmlFilename = "hdfs-default.xml";
     configurationClasses = new Class[] { HdfsClientConfigKeys.class,
         HdfsClientConfigKeys.Failover.class,
         HdfsClientConfigKeys.StripedRead.class, DFSConfigKeys.class,

+ 2 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java

@@ -28,6 +28,7 @@ import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
+import java.nio.charset.StandardCharsets;
 import java.util.function.Supplier;
 import java.io.File;
 import java.io.FileInputStream;
@@ -2097,7 +2098,7 @@ public class TestRecovery {
     String contents = null;
     try {
       in.read(buf, 0, len);
-      contents = new String(buf, "UTF-8");
+      contents = new String(buf, StandardCharsets.UTF_8);
     } finally {
       in.close();
     }

+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java

@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -31,7 +32,6 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 
 /**
  * <code>JobQueueClient</code> is interface provided to the user in order to get
@@ -148,7 +148,7 @@ class JobQueueClient extends Configured implements Tool {
     JobQueueInfo[] rootQueues = jc.getRootQueues();
     for (JobQueueInfo queue : rootQueues) {
       printJobQueueInfo(queue, new PrintWriter(new OutputStreamWriter(
-          System.out, Charsets.UTF_8)));
+          System.out, StandardCharsets.UTF_8)));
     }
   }
   
@@ -187,7 +187,7 @@ class JobQueueClient extends Configured implements Tool {
       return;
     }
     printJobQueueInfo(jobQueueInfo, new PrintWriter(new OutputStreamWriter(
-        System.out, Charsets.UTF_8)));
+        System.out, StandardCharsets.UTF_8)));
     if (showJobs && (jobQueueInfo.getChildren() == null ||
         jobQueueInfo.getChildren().size() == 0)) {
       JobStatus[] jobs = jobQueueInfo.getJobStatuses();

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java

@@ -27,6 +27,7 @@ import java.io.Flushable;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Enumeration;
 import java.util.List;
@@ -56,7 +57,6 @@ import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 
 /**
  * A simple logger to handle the task-specific user logs.
@@ -114,7 +114,7 @@ public class TaskLog {
     File indexFile = getIndexFile(taskid, isCleanup);
     BufferedReader fis = new BufferedReader(new InputStreamReader(
       SecureIOUtils.openForRead(indexFile, obtainLogDirOwner(taskid), null),
-      Charsets.UTF_8));
+      StandardCharsets.UTF_8));
     //the format of the index file is
     //LOG_DIR: <the dir where the task logs are really stored>
     //stdout:<start-offset in the stdout file> <length>

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.mapred;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -27,7 +28,6 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.*;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 
 /** 
  * An {@link InputFormat} for plain text files.  Files are broken into lines.
@@ -62,7 +62,7 @@ public class TextInputFormat extends FileInputFormat<LongWritable, Text>
     String delimiter = job.get("textinputformat.record.delimiter");
     byte[] recordDelimiterBytes = null;
     if (null != delimiter) {
-      recordDelimiterBytes = delimiter.getBytes(Charsets.UTF_8);
+      recordDelimiterBytes = delimiter.getBytes(StandardCharsets.UTF_8);
     }
     return new LineRecordReader(job, (FileSplit) genericSplit,
         recordDelimiterBytes);

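The TextInputFormat hunk above derives the record delimiter bytes with an explicit UTF-8 charset. A sketch of how a job-configured delimiter becomes the byte sequence handed to LineRecordReader; the delimiter value here is hypothetical, not something the patch sets:

    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.mapred.JobConf;

    public class DelimiterBytes {
      public static void main(String[] args) {
        JobConf job = new JobConf();
        job.set("textinputformat.record.delimiter", "\u0001");  // hypothetical custom delimiter

        String delimiter = job.get("textinputformat.record.delimiter");
        byte[] recordDelimiterBytes =
            (delimiter != null) ? delimiter.getBytes(StandardCharsets.UTF_8) : null;
        System.out.println(recordDelimiterBytes.length);  // 1: U+0001 encodes to a single UTF-8 byte
      }
    }
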
+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java

@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.net.InetAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
 import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -63,7 +64,6 @@ import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.yarn.api.records.ReservationId;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
@@ -409,7 +409,7 @@ class JobSubmitter {
 
         for(Map.Entry<String, String> ent: nm.entrySet()) {
           credentials.addSecretKey(new Text(ent.getKey()), ent.getValue()
-              .getBytes(Charsets.UTF_8));
+              .getBytes(StandardCharsets.UTF_8));
         }
       } catch (JsonMappingException | JsonParseException e) {
         LOG.warn("couldn't parse Token Cache JSON file with user secret keys");

+ 2 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JSONHistoryViewerPrinter.java

@@ -33,6 +33,7 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.PrintStream;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.Iterator;
 import java.util.Map;
 
@@ -72,7 +73,7 @@ class JSONHistoryViewerPrinter implements HistoryViewerPrinter {
       printTaskSummary();
       printTasks();
 
-      writer = new OutputStreamWriter(ps, "UTF-8");
+      writer = new OutputStreamWriter(ps, StandardCharsets.UTF_8);
       json.write(writer);
       writer.flush();
     } catch (JSONException je) {

+ 3 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java

@@ -32,7 +32,8 @@ import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
+
 
 /** An {@link InputFormat} for plain text files.  Files are broken into lines.
  * Either linefeed or carriage-return are used to signal end of line.  Keys are
@@ -49,7 +50,7 @@ public class TextInputFormat extends FileInputFormat<LongWritable, Text> {
         "textinputformat.record.delimiter");
     byte[] recordDelimiterBytes = null;
     if (null != delimiter)
-      recordDelimiterBytes = delimiter.getBytes(Charsets.UTF_8);
+      recordDelimiterBytes = delimiter.getBytes(StandardCharsets.UTF_8);
     return new LineRecordReader(recordDelimiterBytes);
   }
 

+ 2 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedPartitioner.java

@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.mapreduce.lib.partition;
 
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -90,12 +90,7 @@ public class KeyFieldBasedPartitioner<K2, V2> extends Partitioner<K2, V2>
       return getPartition(key.toString().hashCode(), numReduceTasks);
     }
 
-    try {
-      keyBytes = key.toString().getBytes("UTF-8");
-    } catch (UnsupportedEncodingException e) {
-      throw new RuntimeException("The current system does not " +
-          "support UTF-8 encoding!", e);
-    }
+    keyBytes = key.toString().getBytes(StandardCharsets.UTF_8);
     // return 0 if the key is empty
     if (keyBytes.length == 0) {
       return 0;

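The KeyFieldBasedPartitioner hunk above (and the KeyFieldHelper hunk below) can drop the try/catch because String.getBytes(Charset) declares no checked exception, whereas String.getBytes(String) declares UnsupportedEncodingException even for charsets every JVM is required to provide. A small stand-alone comparison:

    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    public class GetBytesOverloads {
      public static void main(String[] args) throws UnsupportedEncodingException {
        String key = "key1\tvalue1";

        // Charset-name overload: checked exception, even though UTF-8 always exists.
        byte[] byName = key.getBytes("UTF-8");

        // Charset overload: same bytes, no checked exception, no wrapper try/catch needed.
        byte[] byCharset = key.getBytes(StandardCharsets.UTF_8);

        System.out.println(byName.length == byCharset.length);  // true
      }
    }
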
+ 3 - 8
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldHelper.java

@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.mapreduce.lib.partition;
 
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.ArrayList;
 import java.util.StringTokenizer;
@@ -61,13 +61,8 @@ class KeyFieldHelper {
   private boolean keySpecSeen = false;
   
   public void setKeyFieldSeparator(String keyFieldSeparator) {
-    try {
-      this.keyFieldSeparator =
-        keyFieldSeparator.getBytes("UTF-8");
-    } catch (UnsupportedEncodingException e) {
-      throw new RuntimeException("The current system does not " +
-          "support UTF-8 encoding!", e);
-    }    
+    this.keyFieldSeparator =
+      keyFieldSeparator.getBytes(StandardCharsets.UTF_8);
   }
   
   /** Required for backcompatibility with num.key.fields.for.partition in

+ 6 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java

@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.io.PrintStream;
 import java.io.UnsupportedEncodingException;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import javax.crypto.SecretKey;
 import javax.servlet.http.HttpServletRequest;
 
@@ -34,7 +35,6 @@ import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 
 /**
  * 
@@ -56,7 +56,7 @@ public class SecureShuffleUtils {
    */
   public static String generateHash(byte[] msg, SecretKey key) {
     return new String(Base64.encodeBase64(generateByteHash(msg, key)), 
-        Charsets.UTF_8);
+        StandardCharsets.UTF_8);
   }
   
   /**
@@ -70,7 +70,6 @@ public class SecureShuffleUtils {
   
   /**
    * verify that hash equals to HMacHash(msg)
-   * @param newHash
    * @return true if is the same
    */
   private static boolean verifyHash(byte[] hash, byte[] msg, SecretKey key) {
@@ -87,7 +86,7 @@ public class SecureShuffleUtils {
    */
   public static String hashFromString(String enc_str, SecretKey key) 
   throws IOException {
-    return generateHash(enc_str.getBytes(Charsets.UTF_8), key); 
+    return generateHash(enc_str.getBytes(StandardCharsets.UTF_8), key);
   }
   
   /**
@@ -98,9 +97,9 @@ public class SecureShuffleUtils {
    */
   public static void verifyReply(String base64Hash, String msg, SecretKey key)
   throws IOException {
-    byte[] hash = Base64.decodeBase64(base64Hash.getBytes(Charsets.UTF_8));
+    byte[] hash = Base64.decodeBase64(base64Hash.getBytes(StandardCharsets.UTF_8));
     
-    boolean res = verifyHash(hash, msg.getBytes(Charsets.UTF_8), key);
+    boolean res = verifyHash(hash, msg.getBytes(StandardCharsets.UTF_8), key);
     
     if(res != true) {
       throw new IOException("Verification of the hashReply failed");
@@ -148,7 +147,7 @@ public class SecureShuffleUtils {
       for (byte b : ba) {
         ps.printf("%x", b);
       }
-      strHex = baos.toString("UTF-8");
+      strHex = new String(baos.toByteArray(), StandardCharsets.UTF_8);
     } catch (UnsupportedEncodingException e) {
     }
     return strHex;

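The final SecureShuffleUtils hunk replaces baos.toString("UTF-8"), whose charset-name overload declares UnsupportedEncodingException, with a decode of the raw bytes via a Charset constant. A minimal sketch of just that decode step (the buffer contents are illustrative; as far as I know, Java 10+ also offers ByteArrayOutputStream.toString(Charset) as an even more direct equivalent):

    import java.io.ByteArrayOutputStream;
    import java.nio.charset.StandardCharsets;

    public class DecodeHexBuffer {
      public static void main(String[] args) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        byte[] hexDigits = "616263".getBytes(StandardCharsets.US_ASCII);  // hex output is pure ASCII
        baos.write(hexDigits, 0, hexDigits.length);

        // Old: baos.toString("UTF-8")  -> checked UnsupportedEncodingException to handle.
        // New: decode the byte array with a Charset constant -> no checked exception.
        String strHex = new String(baos.toByteArray(), StandardCharsets.UTF_8);
        System.out.println(strHex);  // 616263
      }
    }
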
Some files are not shown because too many files changed in this diff.