
HADOOP-13444. Replace org.apache.commons.io.Charsets with java.nio.charset.StandardCharsets. Contributed by Vincent Poon.

(cherry picked from commit 5469e128d3e1150b7bf72a4c036a3fbbddc7f7d7)
Committed by Akira Ajisaka
Commit: f39ed9ea8e
37 changed files with 134 additions and 118 deletions
  1. +5 -4 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
  2. +2 -2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
  3. +4 -4 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
  4. +2 -2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/StreamPumper.java
  5. +28 -14 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
  6. +2 -2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
  7. +2 -2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
  8. +3 -3 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
  9. +2 -2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
  10. +3 -3 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
  11. +3 -3 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
  12. +3 -2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
  13. +2 -2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
  14. +2 -4 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java
  15. +2 -2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
  16. +2 -2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
  17. +4 -4 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
  18. +2 -2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
  19. +2 -2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
  20. +0 -2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java
  21. +2 -2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
  22. +2 -2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
  23. +2 -2 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
  24. +5 -3 hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
  25. +3 -2 hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java
  26. +2 -2 hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
  27. +3 -3 hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java
  28. +3 -2 hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java
  29. +4 -4 hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java
  30. +4 -4 hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java
  31. +2 -2 hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKNOD3Request.java
  32. +4 -4 hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/REMOVE3Request.java
  33. +6 -6 hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RENAME3Request.java
  34. +4 -4 hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RMDIR3Request.java
  35. +6 -6 hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SYMLINK3Request.java
  36. +4 -4 hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java
  37. +3 -3 hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java
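Every file in the list follows the same mechanical pattern: drop the org.apache.commons.io.Charsets import and use the equivalent constant from the JDK's java.nio.charset.StandardCharsets instead; both expose java.nio.charset.Charset instances, so the substitution is a drop-in. A minimal self-contained sketch of the pattern (the class name and sample strings below are illustrative, not taken from the patch):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.OutputStreamWriter;
    import java.io.Writer;
    import java.nio.charset.StandardCharsets;

    public class CharsetMigrationExample {
      public static void main(String[] args) throws IOException {
        // Before (commons-io): "hrpc".getBytes(Charsets.UTF_8)
        // After (JDK 7+): StandardCharsets.UTF_8, no third-party dependency.
        byte[] bytes = "hrpc".getBytes(StandardCharsets.UTF_8);

        // The same substitution works anywhere a Charset is accepted,
        // e.g. stream writers/readers and String constructors.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        Writer writer = new OutputStreamWriter(buffer, StandardCharsets.UTF_8);
        writer.write(new String(bytes, StandardCharsets.UTF_8));
        writer.close();
        System.out.println(buffer.toString("UTF-8")); // prints: hrpc
      }
    }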

+ 5 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java

@@ -23,6 +23,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.security.NoSuchAlgorithmException;
 import java.util.Collections;
 import java.util.Date;
@@ -32,7 +33,6 @@ import java.util.Map;
 
 import com.google.gson.stream.JsonReader;
 import com.google.gson.stream.JsonWriter;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -209,7 +209,7 @@ public abstract class KeyProvider {
     protected byte[] serialize() throws IOException {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
       JsonWriter writer = new JsonWriter(
-          new OutputStreamWriter(buffer, Charsets.UTF_8));
+          new OutputStreamWriter(buffer, StandardCharsets.UTF_8));
       try {
         writer.beginObject();
         if (cipher != null) {
@@ -252,8 +252,9 @@ public abstract class KeyProvider {
       int versions = 0;
       String description = null;
       Map<String, String> attributes = null;
-      JsonReader reader = new JsonReader(new InputStreamReader
-        (new ByteArrayInputStream(bytes), Charsets.UTF_8));
+      JsonReader reader =
+          new JsonReader(new InputStreamReader(new ByteArrayInputStream(bytes),
+              StandardCharsets.UTF_8));
       try {
         reader.beginObject();
         while (reader.hasNext()) {

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java

@@ -18,7 +18,6 @@
 package org.apache.hadoop.crypto.key.kms;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider;
@@ -64,6 +63,7 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.security.GeneralSecurityException;
 import java.security.NoSuchAlgorithmException;
 import java.security.PrivilegedExceptionAction;
@@ -270,7 +270,7 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
   }
 
   private static void writeJson(Map map, OutputStream os) throws IOException {
-    Writer writer = new OutputStreamWriter(os, Charsets.UTF_8);
+    Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8);
     ObjectMapper jsonMapper = new ObjectMapper();
     jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, map);
   }

+ 4 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java

@@ -22,6 +22,7 @@ import java.io.EOFException;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.LinkedList;
 import java.util.zip.GZIPInputStream;
 
@@ -33,7 +34,6 @@ import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.EncoderFactory;
 import org.apache.avro.io.JsonEncoder;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -237,10 +237,10 @@ class Display extends FsCommand {
         if (!r.next(key, val)) {
           return -1;
         }
-        byte[] tmp = key.toString().getBytes(Charsets.UTF_8);
+        byte[] tmp = key.toString().getBytes(StandardCharsets.UTF_8);
         outbuf.write(tmp, 0, tmp.length);
         outbuf.write('\t');
-        tmp = val.toString().getBytes(Charsets.UTF_8);
+        tmp = val.toString().getBytes(StandardCharsets.UTF_8);
         outbuf.write(tmp, 0, tmp.length);
         outbuf.write('\n');
         inbuf.reset(outbuf.getData(), outbuf.getLength());
@@ -303,7 +303,7 @@ class Display extends FsCommand {
       if (!fileReader.hasNext()) {
         // Write a new line after the last Avro record.
         output.write(System.getProperty("line.separator")
-                         .getBytes(Charsets.UTF_8));
+                         .getBytes(StandardCharsets.UTF_8));
         output.flush();
       }
       pos = 0;

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/StreamPumper.java

@@ -21,8 +21,8 @@ import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 
 /**
@@ -78,7 +78,7 @@ class StreamPumper {
 
   protected void pump() throws IOException {
     InputStreamReader inputStreamReader = new InputStreamReader(
-        stream, Charsets.UTF_8);
+        stream, StandardCharsets.UTF_8);
     BufferedReader br = new BufferedReader(inputStreamReader);
     String line = null;
     while ((line = br.readLine()) != null) {

+ 28 - 14
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java

@@ -17,21 +17,25 @@
  */
 package org.apache.hadoop.http;
 
-import org.apache.commons.io.Charsets;
-
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 
 /**
  * This class is responsible for quoting HTML characters.
  */
 public class HtmlQuoting {
-  private static final byte[] ampBytes = "&amp;".getBytes(Charsets.UTF_8);
-  private static final byte[] aposBytes = "&apos;".getBytes(Charsets.UTF_8);
-  private static final byte[] gtBytes = "&gt;".getBytes(Charsets.UTF_8);
-  private static final byte[] ltBytes = "&lt;".getBytes(Charsets.UTF_8);
-  private static final byte[] quotBytes = "&quot;".getBytes(Charsets.UTF_8);
+  private static final byte[] AMP_BYTES =
+      "&amp;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] APOS_BYTES =
+      "&apos;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] GT_BYTES =
+      "&gt;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] LT_BYTES =
+      "&lt;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] QUOT_BYTES =
+      "&quot;".getBytes(StandardCharsets.UTF_8);
 
   /**
    * Does the given string need to be quoted?
@@ -65,7 +69,7 @@ public class HtmlQuoting {
     if (str == null) {
       return false;
     }
-    byte[] bytes = str.getBytes(Charsets.UTF_8);
+    byte[] bytes = str.getBytes(StandardCharsets.UTF_8);
     return needsQuoting(bytes, 0 , bytes.length);
   }
 
@@ -81,11 +85,21 @@ public class HtmlQuoting {
                                     int off, int len) throws IOException {
     for(int i=off; i < off+len; i++) {
       switch (buffer[i]) {
-      case '&': output.write(ampBytes); break;
-      case '<': output.write(ltBytes); break;
-      case '>': output.write(gtBytes); break;
-      case '\'': output.write(aposBytes); break;
-      case '"': output.write(quotBytes); break;
+      case '&':
+        output.write(AMP_BYTES);
+        break;
+      case '<':
+        output.write(LT_BYTES);
+        break;
+      case '>':
+        output.write(GT_BYTES);
+        break;
+      case '\'':
+        output.write(APOS_BYTES);
+        break;
+      case '"':
+        output.write(QUOT_BYTES);
+        break;
       default: output.write(buffer, i, 1);
       }
     }
@@ -100,7 +114,7 @@ public class HtmlQuoting {
     if (item == null) {
       return null;
     }
-    byte[] bytes = item.getBytes(Charsets.UTF_8);
+    byte[] bytes = item.getBytes(StandardCharsets.UTF_8);
     if (needsQuoting(bytes, 0, bytes.length)) {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
       try {
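Beyond the charset swap, this hunk also renames the entity constants to upper case and keeps the approach of writing pre-encoded replacement byte arrays instead of formatting strings per character. A minimal self-contained sketch of that quoting technique (class and method names here are illustrative, not Hadoop's):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    public class QuotingSketch {
      // Encode each replacement once, up front, rather than per call.
      private static final byte[] AMP = "&amp;".getBytes(StandardCharsets.UTF_8);
      private static final byte[] LT = "&lt;".getBytes(StandardCharsets.UTF_8);
      private static final byte[] GT = "&gt;".getBytes(StandardCharsets.UTF_8);

      static String quote(String s) throws IOException {
        byte[] in = s.getBytes(StandardCharsets.UTF_8);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        for (int i = 0; i < in.length; i++) {
          switch (in[i]) {
          case '&': out.write(AMP); break;
          case '<': out.write(LT); break;
          case '>': out.write(GT); break;
          default: out.write(in, i, 1);
          }
        }
        return out.toString("UTF-8");
      }

      public static void main(String[] args) throws IOException {
        System.out.println(quote("a < b & c")); // prints: a &lt; b &amp; c
      }
    }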

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java

@@ -19,11 +19,11 @@
 package org.apache.hadoop.io;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.nio.charset.UnsupportedCharsetException;
 import java.util.ArrayList;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -91,7 +91,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
     serializer.serialize(obj);
     byte[] buf = new byte[outBuf.getLength()];
     System.arraycopy(outBuf.getData(), 0, buf, 0, buf.length);
-    return new String(Base64.encodeBase64(buf), Charsets.UTF_8);
+    return new String(Base64.encodeBase64(buf), StandardCharsets.UTF_8);
   }
 
   @Override

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java

@@ -19,11 +19,11 @@
 package org.apache.hadoop.io;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.*;
 import java.rmi.server.UID;
 import java.security.MessageDigest;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.*;
 import org.apache.hadoop.util.Options;
 import org.apache.hadoop.fs.*;
@@ -853,7 +853,7 @@ public class SequenceFile {
       try {                                       
         MessageDigest digester = MessageDigest.getInstance("MD5");
         long time = Time.now();
-        digester.update((new UID()+"@"+time).getBytes(Charsets.UTF_8));
+        digester.update((new UID()+"@"+time).getBytes(StandardCharsets.UTF_8));
         sync = digester.digest();
       } catch (Exception e) {
         throw new RuntimeException(e);

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java

@@ -22,8 +22,8 @@ import java.io.BufferedInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 
@@ -287,7 +287,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec {
         // The compressed bzip2 stream should start with the
         // identifying characters BZ. Caller of CBZip2OutputStream
         // i.e. this class must write these characters.
-        out.write(HEADER.getBytes(Charsets.UTF_8));
+        out.write(HEADER.getBytes(StandardCharsets.UTF_8));
       }
     }
 
@@ -421,7 +421,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec {
         byte[] headerBytes = new byte[HEADER_LEN];
         int actualRead = bufferedIn.read(headerBytes, 0, HEADER_LEN);
         if (actualRead != -1) {
-          String header = new String(headerBytes, Charsets.UTF_8);
+          String header = new String(headerBytes, StandardCharsets.UTF_8);
           if (header.compareTo(HEADER) != 0) {
             bufferedIn.reset();
           } else {

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java

@@ -18,13 +18,13 @@ package org.apache.hadoop.io.file.tfile;
 
 import java.io.IOException;
 import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -234,7 +234,7 @@ class TFileDumper {
               out.printf("%X", b);
             }
           } else {
-            out.print(new String(key, 0, sampleLen, Charsets.UTF_8));
+            out.print(new String(key, 0, sampleLen, StandardCharsets.UTF_8));
           }
           if (sampleLen < key.length) {
             out.print("...");

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.ipc;
 
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 
 @InterfaceAudience.Private
@@ -40,8 +40,8 @@ public class RpcConstants {
   /**
    * The first four bytes of Hadoop RPC connections
    */
-  public static final ByteBuffer HEADER = ByteBuffer.wrap("hrpc".getBytes
-      (Charsets.UTF_8));
+  public static final ByteBuffer HEADER =
+      ByteBuffer.wrap("hrpc".getBytes(StandardCharsets.UTF_8));
   
   // 1 : Introduce ping and server does not throw away RPCs
   // 3 : Introduce the protocol into the RPC connection header
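For context, the HEADER constant above holds the four-byte preamble that every Hadoop RPC connection starts with. A standalone sketch (not Hadoop code) printing those bytes:

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    public class RpcPreambleCheck {
      public static void main(String[] args) {
        ByteBuffer header =
            ByteBuffer.wrap("hrpc".getBytes(StandardCharsets.UTF_8));
        while (header.hasRemaining()) {
          System.out.printf("0x%02x ", header.get()); // 0x68 0x72 0x70 0x63
        }
        System.out.println();
      }
    }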

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

@@ -46,6 +46,7 @@ import java.nio.channels.Selector;
 import java.nio.channels.ServerSocketChannel;
 import java.nio.channels.SocketChannel;
 import java.nio.channels.WritableByteChannel;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -68,7 +69,6 @@ import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -222,7 +222,7 @@ public abstract class Server {
    * and send back a nicer response.
    */
   private static final ByteBuffer HTTP_GET_BYTES = ByteBuffer.wrap(
-      "GET ".getBytes(Charsets.UTF_8));
+      "GET ".getBytes(StandardCharsets.UTF_8));
   
   /**
    * An HTTP response to send back if we detect an HTTP request to our IPC
@@ -1888,7 +1888,7 @@ public abstract class Server {
     private void setupHttpRequestOnIpcPortResponse() throws IOException {
       Call fakeCall = new Call(0, RpcConstants.INVALID_RETRY_COUNT, null, this);
       fakeCall.setResponse(ByteBuffer.wrap(
-          RECEIVED_HTTP_REQ_RESPONSE.getBytes(Charsets.UTF_8)));
+          RECEIVED_HTTP_REQ_RESPONSE.getBytes(StandardCharsets.UTF_8)));
       fakeCall.sendResponse();
     }
 

+ 3 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java

@@ -19,7 +19,6 @@
 package org.apache.hadoop.metrics2.sink;
 
 import org.apache.commons.configuration.SubsetConfiguration;
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -35,6 +34,7 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.net.Socket;
+import java.nio.charset.StandardCharsets;
 
 /**
  * A metrics sink that writes to a Graphite server
@@ -150,7 +150,8 @@ public class GraphiteSink implements MetricsSink, Closeable {
         try {
           // Open a connection to Graphite server.
           socket = new Socket(serverHost, serverPort);
-          writer = new OutputStreamWriter(socket.getOutputStream(), Charsets.UTF_8);
+        writer = new OutputStreamWriter(socket.getOutputStream(),
+                StandardCharsets.UTF_8);
         } catch (Exception e) {
           connectionFailures++;
           if (tooManyConnectionFailures()) {

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java

@@ -20,12 +20,12 @@ package org.apache.hadoop.metrics2.sink.ganglia;
 
 import java.io.IOException;
 import java.net.*;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.configuration.SubsetConfiguration;
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.MetricsSink;
@@ -233,7 +233,7 @@ public abstract class AbstractGangliaSink implements MetricsSink {
    * @param s the string to be written to buffer at offset location
    */
   protected void xdr_string(String s) {
-    byte[] bytes = s.getBytes(Charsets.UTF_8);
+    byte[] bytes = s.getBytes(StandardCharsets.UTF_8);
     int len = bytes.length;
     xdr_int(len);
     System.arraycopy(bytes, 0, buffer, offset, len);

+ 2 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java

@@ -21,15 +21,13 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.NET_TOPOLOGY_TA
 
 import java.io.BufferedReader;
 import java.io.FileInputStream;
-import java.io.FileReader;
-import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -102,7 +100,7 @@ public class TableMapping extends CachedDNSToSwitchMapping {
 
       try (BufferedReader reader =
                new BufferedReader(new InputStreamReader(
-                   new FileInputStream(filename), Charsets.UTF_8))) {
+                   new FileInputStream(filename), StandardCharsets.UTF_8))) {
         String line = reader.readLine();
         while (line != null) {
           line = line.trim();

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java

@@ -26,6 +26,7 @@ import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
@@ -33,7 +34,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -220,7 +220,7 @@ public class Credentials implements Writable {
   }
   
   private static final byte[] TOKEN_STORAGE_MAGIC =
-      "HDTS".getBytes(Charsets.UTF_8);
+      "HDTS".getBytes(StandardCharsets.UTF_8);
   private static final byte TOKEN_STORAGE_VERSION = 0;
   
   public void writeTokenStorageToStream(DataOutputStream os)

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java

@@ -21,6 +21,7 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.Reader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Hashtable;
@@ -35,7 +36,6 @@ import javax.naming.directory.InitialDirContext;
 import javax.naming.directory.SearchControls;
 import javax.naming.directory.SearchResult;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -424,7 +424,7 @@ public class LdapGroupsMapping
 
     StringBuilder password = new StringBuilder();
     try (Reader reader = new InputStreamReader(
-        new FileInputStream(pwFile), Charsets.UTF_8)) {
+        new FileInputStream(pwFile), StandardCharsets.UTF_8)) {
       int c = reader.read();
       while (c > -1) {
         password.append((char)c);

+ 4 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java

@@ -23,6 +23,7 @@ import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.security.Security;
 import java.util.ArrayList;
@@ -44,7 +45,6 @@ import javax.security.sasl.SaslServer;
 import javax.security.sasl.SaslServerFactory;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -185,11 +185,11 @@ public class SaslRpcServer {
   }
   
   static String encodeIdentifier(byte[] identifier) {
-    return new String(Base64.encodeBase64(identifier), Charsets.UTF_8);
+    return new String(Base64.encodeBase64(identifier), StandardCharsets.UTF_8);
   }
 
   static byte[] decodeIdentifier(String identifier) {
-    return Base64.decodeBase64(identifier.getBytes(Charsets.UTF_8));
+    return Base64.decodeBase64(identifier.getBytes(StandardCharsets.UTF_8));
   }
 
   public static <T extends TokenIdentifier> T getIdentifier(String id,
@@ -208,7 +208,7 @@ public class SaslRpcServer {
 
   static char[] encodePassword(byte[] password) {
     return new String(Base64.encodeBase64(password),
-                      Charsets.UTF_8).toCharArray();
+                      StandardCharsets.UTF_8).toCharArray();
   }
 
   /** Splitting fully qualified Kerberos name into parts */

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java

@@ -23,12 +23,12 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -582,7 +582,7 @@ public class ShellBasedIdMapping implements IdMappingServiceProvider {
     Map<Integer, Integer> gidMapping = new HashMap<Integer, Integer>();
     
     BufferedReader in = new BufferedReader(new InputStreamReader(
-        new FileInputStream(staticMapFile), Charsets.UTF_8));
+        new FileInputStream(staticMapFile), StandardCharsets.UTF_8));
     
     try {
       String line = null;

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java

@@ -20,10 +20,10 @@ package org.apache.hadoop.security.alias;
 
 import java.io.IOException;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
@@ -58,7 +58,7 @@ public class UserProvider extends CredentialProvider {
       return null;
     }
     return new CredentialEntry(
-        alias, new String(bytes, Charsets.UTF_8).toCharArray());
+        alias, new String(bytes, StandardCharsets.UTF_8).toCharArray());
   }
 
   @Override

+ 0 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java

@@ -21,11 +21,9 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.net.InetSocketAddress;
-import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java

@@ -23,12 +23,12 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.Reader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -89,7 +89,7 @@ public class FileBasedIPList implements IPList {
         if (file.exists()) {
           try (
               Reader fileReader = new InputStreamReader(
-                  new FileInputStream(file), Charsets.UTF_8);
+                  new FileInputStream(file), StandardCharsets.UTF_8);
               BufferedReader bufferedReader = new BufferedReader(fileReader)) {
             List<String> lines = new ArrayList<String>();
             String line = null;

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java

@@ -19,13 +19,13 @@
 package org.apache.hadoop.util;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.Set;
 import java.util.HashSet;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -85,7 +85,7 @@ public class HostsFileReader {
     BufferedReader reader = null;
     try {
       reader = new BufferedReader(
-          new InputStreamReader(fileInputStream, Charsets.UTF_8));
+          new InputStreamReader(fileInputStream, StandardCharsets.UTF_8));
       String line;
       while ((line = reader.readLine()) != null) {
         String[] nodes = line.split("[ \t\n\f\r]+");

+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java

@@ -25,12 +25,12 @@ import java.io.IOException;
 import java.net.DatagramPacket;
 import java.net.DatagramSocket;
 import java.net.SocketException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.AbstractMetric;
@@ -148,7 +148,7 @@ public class TestGangliaMetrics {
   private void checkMetrics(List<byte[]> bytearrlist, int expectedCount) {
     boolean[] foundMetrics = new boolean[expectedMetrics.length];
     for (byte[] bytes : bytearrlist) {
-      String binaryStr = new String(bytes, Charsets.UTF_8);
+      String binaryStr = new String(bytes, StandardCharsets.UTF_8);
       for (int index = 0; index < expectedMetrics.length; index++) {
         if (binaryStr.indexOf(expectedMetrics[index]) >= 0) {
           foundMetrics[index] = true;

+ 5 - 3
hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java

@@ -17,7 +17,6 @@
  */
 
 package org.apache.hadoop.minikdc;
-import org.apache.commons.io.Charsets;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.text.StrSubstitutor;
@@ -65,6 +64,7 @@ import java.io.InputStreamReader;
 import java.io.StringReader;
 import java.lang.reflect.Method;
 import java.net.InetSocketAddress;
+import java.nio.charset.StandardCharsets;
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -135,7 +135,8 @@ public class MiniKdc {
     Properties userConf = new Properties();
     InputStreamReader r = null;
     try {
-      r = new InputStreamReader(new FileInputStream(file), Charsets.UTF_8);
+      r = new InputStreamReader(new FileInputStream(file),
+          StandardCharsets.UTF_8);
       userConf.load(r);
     } finally {
       if (r != null) {
@@ -450,7 +451,8 @@ public class MiniKdc {
     BufferedReader r = null;
 
     try {
-      r = new BufferedReader(new InputStreamReader(is2, Charsets.UTF_8));
+      r = new BufferedReader(
+          new InputStreamReader(is2, StandardCharsets.UTF_8));
       String line = r.readLine();
 
       while (line != null) {

+ 3 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java

@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.mount;
 
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.NfsExports;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply;
 import org.apache.hadoop.oncrpc.XDR;
@@ -77,7 +77,8 @@ public class MountResponse {
       if (hostGroups.length > 0) {
         for (int j = 0; j < hostGroups.length; j++) {
           xdr.writeBoolean(true); // Value follows - yes
-          xdr.writeVariableOpaque(hostGroups[j].getBytes(Charsets.UTF_8));
+          xdr.writeVariableOpaque(
+              hostGroups[j].getBytes(StandardCharsets.UTF_8));
         }
       }
       xdr.writeBoolean(false); // Value follows - no more group

+ 2 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java

@@ -18,11 +18,11 @@
 package org.apache.hadoop.nfs.nfs3;
 
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.Arrays;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.XDR;
@@ -73,7 +73,7 @@ public class FileHandle {
       return;
     }
 
-    byte[] in = s.getBytes(Charsets.UTF_8);
+    byte[] in = s.getBytes(StandardCharsets.UTF_8);
     digest.update(in);
 
     byte[] digestbytes = digest.digest();

+ 3 - 3
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
 import org.apache.hadoop.oncrpc.XDR;
@@ -79,9 +79,9 @@ public class CREATE3Request extends RequestWithHandle {
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
     xdr.writeInt(name.length());
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8), name.length());
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8), name.length());
     xdr.writeInt(mode);
     objAttr.serialize(xdr);
   }
 
-}
+}

+ 3 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -57,6 +57,7 @@ public class LINK3Request extends RequestWithHandle {
     handle.serialize(xdr);
     fromDirHandle.serialize(xdr);
     xdr.writeInt(fromName.length());
-    xdr.writeFixedOpaque(fromName.getBytes(Charsets.UTF_8), fromName.length());
+    xdr.writeFixedOpaque(fromName.getBytes(StandardCharsets.UTF_8),
+        fromName.length());
   }
 }

+ 4 - 4
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -54,7 +54,7 @@ public class LOOKUP3Request extends RequestWithHandle {
   @VisibleForTesting
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
   }
-}
+}

+ 4 - 4
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -55,8 +55,8 @@ public class MKDIR3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
     objAttr.serialize(xdr);
   }
-}
+}

+ 2 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKNOD3Request.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.NfsFileType;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes.Specdata3;
@@ -80,7 +80,7 @@ public class MKNOD3Request extends RequestWithHandle {
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
     xdr.writeInt(name.length());
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8), name.length());
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8), name.length());
     objAttr.serialize(xdr);
     if (spec != null) {
       xdr.writeInt(spec.getSpecdata1());

+ 4 - 4
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/REMOVE3Request.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -47,7 +47,7 @@ public class REMOVE3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
   }
-}
+}

+ 6 - 6
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RENAME3Request.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -67,10 +67,10 @@ public class RENAME3Request extends NFS3Request {
   @Override
   public void serialize(XDR xdr) {
     fromDirHandle.serialize(xdr);
-    xdr.writeInt(fromName.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(fromName.getBytes(Charsets.UTF_8));
+    xdr.writeInt(fromName.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(fromName.getBytes(StandardCharsets.UTF_8));
     toDirHandle.serialize(xdr);
-    xdr.writeInt(toName.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(toName.getBytes(Charsets.UTF_8));
+    xdr.writeInt(toName.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(toName.getBytes(StandardCharsets.UTF_8));
   }
-}
+}

+ 4 - 4
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RMDIR3Request.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -47,7 +47,7 @@ public class RMDIR3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
   }
-}
+}

+ 6 - 6
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SYMLINK3Request.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -63,10 +63,10 @@ public class SYMLINK3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
     symAttr.serialize(xdr);
-    xdr.writeInt(symData.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(symData.getBytes(Charsets.UTF_8));
+    xdr.writeInt(symData.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(symData.getBytes(StandardCharsets.UTF_8));
   }
-}
+}

+ 4 - 4
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.oncrpc;
 
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.buffer.ChannelBuffers;
 
@@ -166,11 +166,11 @@ public final class XDR {
   }
 
   public String readString() {
-    return new String(readVariableOpaque(), Charsets.UTF_8);
+    return new String(readVariableOpaque(), StandardCharsets.UTF_8);
   }
 
   public void writeString(String s) {
-    writeVariableOpaque(s.getBytes(Charsets.UTF_8));
+    writeVariableOpaque(s.getBytes(StandardCharsets.UTF_8));
   }
 
   private void writePadding() {
@@ -270,4 +270,4 @@ public final class XDR {
 
     return b;
   }
-}
+}

+ 3 - 3
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java

@@ -19,9 +19,9 @@ package org.apache.hadoop.oncrpc.security;
 
 import java.net.InetAddress;
 import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.oncrpc.XDR;
 
 /** Credential used by AUTH_SYS */
@@ -106,11 +106,11 @@ public class CredentialsSys extends Credentials {
   public void write(XDR xdr) {
     int padding = 0;
     // Ensure there are padding bytes if hostname is not a multiple of 4.
-    padding = 4 - (mHostName.getBytes(Charsets.UTF_8).length % 4);
+    padding = 4 - (mHostName.getBytes(StandardCharsets.UTF_8).length % 4);
     // padding bytes is zero if hostname is already a multiple of 4.
     padding = padding % 4;
     // mStamp + mHostName.length + mHostName + mUID + mGID + mAuxGIDs.count
-    mCredentialsLength = 20 + mHostName.getBytes(Charsets.UTF_8).length;
+    mCredentialsLength = 20 + mHostName.getBytes(StandardCharsets.UTF_8).length;
     mCredentialsLength = mCredentialsLength + padding;
     // mAuxGIDs
     if (mAuxGIDs != null && mAuxGIDs.length > 0) {
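The padding arithmetic in this last hunk rounds the hostname's UTF-8 byte length up to the next multiple of four, as XDR requires; the second % 4 turns a full word of padding (produced when the length is already aligned) back into zero. A minimal standalone check of that arithmetic:

    import java.nio.charset.StandardCharsets;

    public class XdrPaddingCheck {
      public static void main(String[] args) {
        for (String host : new String[] {"abc", "abcd", "abcde"}) {
          int len = host.getBytes(StandardCharsets.UTF_8).length;
          // Same two-step computation as CredentialsSys.write():
          int padding = 4 - (len % 4); // yields 4 when len is aligned...
          padding = padding % 4;       // ...so reduce that 4 back to 0.
          System.out.println(host + ": len=" + len + ", padding=" + padding);
        }
      }
    }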