
HADOOP-11602. Backport 'Fix toUpperCase/toLowerCase to use Locale.ENGLISH.' (ozawa)

(cherry picked from commit b46f9e72dbed6fd1f8cae1e12973252462d6ee15)
Tsuyoshi Ozawa 10 years ago
parent
commit
28f4e6b222
100 changed files with 382 additions and 205 deletions
  1. +3 -2
      hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
  2. +4 -2
      hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java
  3. +1 -1
      hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
  4. +8 -6
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
  5. +2 -0
      hadoop-common-project/hadoop-common/CHANGES.txt
  6. +2 -4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
  7. +2 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
  8. +2 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
  9. +5 -2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
  10. +2 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java
  11. +3 -2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
  12. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
  13. +3 -2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java
  14. +4 -3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
  15. +4 -3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
  16. +3 -2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
  17. +7 -3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java
  18. +7 -3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java
  19. +3 -2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java
  20. +2 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java
  21. +7 -5
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
  22. +2 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
  23. +3 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
  24. +3 -2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
  25. +6 -4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
  26. +2 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
  27. +2 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
  28. +1 -2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
  29. +39 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
  30. +1 -1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
  31. +1 -1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
  32. +5 -3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
  33. +3 -2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
  34. +4 -2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
  35. +21 -0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
  36. +4 -2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
  37. +3 -2
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
  38. +3 -1
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
  39. +5 -2
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
  40. +3 -1
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
  41. +2 -1
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
  42. +4 -2
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java
  43. +1 -1
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
  44. +2 -1
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java
  45. +2 -1
      hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
  46. +12 -7
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java
  47. +2 -1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
  48. +3 -1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java
  49. +3 -2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
  50. +3 -1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java
  51. +2 -1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
  52. +2 -2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java
  53. +1 -1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
  54. +9 -8
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
  55. +4 -3
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
  56. +3 -1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
  57. +2 -1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
  58. +2 -1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
  59. +2 -2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
  60. +2 -1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java
  61. +2 -1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java
  62. +3 -3
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java
  63. +2 -1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
  64. +3 -3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
  65. +2 -1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
  66. +2 -2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
  67. +4 -2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
  68. +1 -1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
  69. +2 -2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
  70. +2 -2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
  71. +4 -1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
  72. +6 -3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
  73. +6 -6
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
  74. +3 -1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
  75. +4 -2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java
  76. +2 -1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java
  77. +3 -1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java
  78. +2 -1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
  79. +10 -7
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java
  80. +2 -1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
  81. +1 -1
      hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java
  82. +3 -1
      hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
  83. +2 -2
      hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
  84. +8 -4
      hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
  85. +3 -1
      hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
  86. +2 -1
      hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java
  87. +2 -2
      hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java
  88. +17 -16
      hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
  89. +1 -1
      hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
  90. +2 -1
      hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
  91. +2 -1
      hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
  92. +2 -1
      hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
  93. +4 -3
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
  94. +2 -1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
  95. +4 -2
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
  96. +2 -1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
  97. +3 -3
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java
  98. +3 -3
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
  99. +2 -1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
  100. +3 -1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
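
Why the explicit locale matters: without one, String#toLowerCase() and String#toUpperCase() use the JVM's default locale, so case conversion of ASCII identifiers (config values, enum names, hostnames) changes behavior under locales such as Turkish, where 'I' lowercases to the dotless 'ı'. The new test added to TestStringUtils in this commit exercises exactly this; the following is a minimal standalone sketch of the pitfall (the class name is illustrative):

import java.util.Locale;

public class LocaleCasePitfall {
  public static void main(String[] args) {
    // Simulate a JVM running under the Turkish locale.
    Locale.setDefault(new Locale("tr", "TR"));

    // Locale-sensitive: 'I' becomes dotless 'ı' (U+0131), not 'i'.
    System.out.println("TITLE".toLowerCase());                  // prints "tıtle"
    System.out.println("title".equals("TITLE".toLowerCase()));  // false

    // Locale-free: stable regardless of the default locale.
    System.out.println("TITLE".toLowerCase(Locale.ENGLISH));    // prints "title"
  }
}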

+ 3 - 2
hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java

@@ -21,6 +21,7 @@ import com.sun.javadoc.DocErrorReporter;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 class StabilityOptions {
   public static final String STABLE_OPTION = "-stable";
@@ -28,7 +29,7 @@ class StabilityOptions {
   public static final String UNSTABLE_OPTION = "-unstable";
 
   public static Integer optionLength(String option) {
-    String opt = option.toLowerCase();
+    String opt = option.toLowerCase(Locale.ENGLISH);
     if (opt.equals(UNSTABLE_OPTION)) return 1;
     if (opt.equals(EVOLVING_OPTION)) return 1;
     if (opt.equals(STABLE_OPTION)) return 1;
@@ -38,7 +39,7 @@ class StabilityOptions {
   public static void validOptions(String[][] options,
       DocErrorReporter reporter) {
     for (int i = 0; i < options.length; i++) {
-      String opt = options[i][0].toLowerCase();
+      String opt = options[i][0].toLowerCase(Locale.ENGLISH);
       if (opt.equals(UNSTABLE_OPTION)) {
 	RootDocProcessor.stability = UNSTABLE_OPTION;
       } else if (opt.equals(EVOLVING_OPTION)) {

+ 4 - 2
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java

@@ -14,6 +14,7 @@
 package org.apache.hadoop.security.authentication.server;
 
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Properties;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
@@ -68,7 +69,8 @@ public abstract class AltKerberosAuthenticationHandler
             NON_BROWSER_USER_AGENTS, NON_BROWSER_USER_AGENTS_DEFAULT)
             .split("\\W*,\\W*");
     for (int i = 0; i < nonBrowserUserAgents.length; i++) {
-        nonBrowserUserAgents[i] = nonBrowserUserAgents[i].toLowerCase();
+        nonBrowserUserAgents[i] =
+            nonBrowserUserAgents[i].toLowerCase(Locale.ENGLISH);
     }
   }
 
@@ -120,7 +122,7 @@ public abstract class AltKerberosAuthenticationHandler
     if (userAgent == null) {
       return false;
     }
-    userAgent = userAgent.toLowerCase();
+    userAgent = userAgent.toLowerCase(Locale.ENGLISH);
     boolean isBrowser = true;
     for (String nonBrowserUserAgent : nonBrowserUserAgents) {
         if (userAgent.contains(nonBrowserUserAgent)) {

+ 1 - 1
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java

@@ -110,7 +110,7 @@ public class KerberosUtil {
     }
     // convert hostname to lowercase as kerberos does not work with hostnames
     // with uppercase characters.
-    return service + "/" + fqdn.toLowerCase(Locale.US);
+    return service + "/" + fqdn.toLowerCase(Locale.ENGLISH);
   }
 
   /**

+ 8 - 6
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java

@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Locale;
 import java.util.regex.Pattern;
 
 import org.apache.directory.server.kerberos.shared.keytab.Keytab;
@@ -58,24 +59,25 @@ public class TestKerberosUtil {
 
     // send null hostname
     Assert.assertEquals("When no hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, null));
     // send empty hostname
     Assert.assertEquals("When empty hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, ""));
     // send 0.0.0.0 hostname
     Assert.assertEquals("When 0.0.0.0 hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, "0.0.0.0"));
     // send uppercase hostname
     Assert.assertEquals("When uppercase hostname is sent",
-        service + "/" + testHost.toLowerCase(),
+        service + "/" + testHost.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, testHost));
     // send lowercase hostname
     Assert.assertEquals("When lowercase hostname is sent",
-        service + "/" + testHost.toLowerCase(),
-        KerberosUtil.getServicePrincipal(service, testHost.toLowerCase()));
+        service + "/" + testHost.toLowerCase(Locale.ENGLISH),
+        KerberosUtil.getServicePrincipal(
+            service, testHost.toLowerCase(Locale.ENGLISH)));
   }
   
   @Test

+ 2 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -642,6 +642,8 @@ Release 2.7.0 - UNRELEASED
 
     HADOOP-11670. Regression: s3a auth setup broken. (Adam Budde via stevel)
 
+    HADOOP-11602. Fix toUpperCase/toLowerCase to use Locale.ENGLISH. (ozawa)
+
 Release 2.6.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

+ 2 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

@@ -1434,11 +1434,9 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
       return defaultValue;
     }
 
-    valueString = valueString.toLowerCase();
-
-    if ("true".equals(valueString))
+    if (StringUtils.equalsIgnoreCase("true", valueString))
       return true;
-    else if ("false".equals(valueString))
+    else if (StringUtils.equalsIgnoreCase("false", valueString))
       return false;
     else return defaultValue;
   }

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.crypto;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Defines properties of a CipherSuite. Modeled after the ciphers in
@@ -97,7 +98,7 @@ public enum CipherSuite {
     String[] parts = name.split("/");
     StringBuilder suffix = new StringBuilder();
     for (String part : parts) {
-      suffix.append(".").append(part.toLowerCase());
+      suffix.append(".").append(StringUtils.toLowerCase(part));
     }
     
     return suffix.toString();

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java

@@ -28,6 +28,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.ProviderUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -422,7 +423,7 @@ public class JavaKeyStoreProvider extends KeyProvider {
   @Override
   public KeyVersion createKey(String name, byte[] material,
                                Options options) throws IOException {
-    Preconditions.checkArgument(name.equals(name.toLowerCase()),
+    Preconditions.checkArgument(name.equals(StringUtils.toLowerCase(name)),
         "Uppercase key names are unsupported: %s", name);
     writeLock.lock();
     try {

+ 5 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java

@@ -65,6 +65,7 @@ import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ShutdownHookManager;
+import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -2796,8 +2797,10 @@ public abstract class FileSystem extends Configured implements Closeable {
       }
 
       Key(URI uri, Configuration conf, long unique) throws IOException {
-        scheme = uri.getScheme()==null?"":uri.getScheme().toLowerCase();
-        authority = uri.getAuthority()==null?"":uri.getAuthority().toLowerCase();
+        scheme = uri.getScheme()==null ?
+            "" : StringUtils.toLowerCase(uri.getScheme());
+        authority = uri.getAuthority()==null ?
+            "" : StringUtils.toLowerCase(uri.getAuthority());
         this.unique = unique;
         
         this.ugi = UserGroupInformation.getCurrentUser();

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java

@@ -24,6 +24,7 @@ import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Defines the types of supported storage media. The default storage
@@ -78,7 +79,7 @@ public enum StorageType {
   }
 
   public static StorageType parseStorageType(String s) {
-    return StorageType.valueOf(s.toUpperCase());
+    return StorageType.valueOf(StringUtils.toUpperCase(s));
   }
 
   private static List<StorageType> getNonTransientTypes() {

+ 3 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java

@@ -106,7 +106,7 @@ public class AclEntry {
       sb.append("default:");
     }
     if (type != null) {
-      sb.append(type.toString().toLowerCase());
+      sb.append(StringUtils.toLowerCase(type.toString()));
     }
     sb.append(':');
     if (name != null) {
@@ -263,7 +263,8 @@ public class AclEntry {
 
     AclEntryType aclType = null;
     try {
-      aclType = Enum.valueOf(AclEntryType.class, split[index].toUpperCase());
+      aclType = Enum.valueOf(
+          AclEntryType.class, StringUtils.toUpperCase(split[index]));
       builder.setType(aclType);
       index++;
     } catch (IllegalArgumentException iae) {

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java

@@ -79,7 +79,7 @@ class XAttrCommands extends FsCommand {
       String en = StringUtils.popOptionWithArgument("-e", args);
       if (en != null) {
         try {
-          encoding = enValueOfFunc.apply(en.toUpperCase(Locale.ENGLISH));
+          encoding = enValueOfFunc.apply(StringUtils.toUpperCase(en));
         } catch (IllegalArgumentException e) {
           throw new IllegalArgumentException(
               "Invalid/unsupported encoding option specified: " + en);

+ 3 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java

@@ -22,6 +22,7 @@ import java.util.Deque;
 
 import org.apache.hadoop.fs.GlobPattern;
 import org.apache.hadoop.fs.shell.PathData;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Implements the -name expression for the
@@ -73,7 +74,7 @@ final class Name extends BaseExpression {
   public void prepare() throws IOException {
     String argPattern = getArgument(1);
     if (!caseSensitive) {
-      argPattern = argPattern.toLowerCase();
+      argPattern = StringUtils.toLowerCase(argPattern);
     }
     globPattern = new GlobPattern(argPattern);
   }
@@ -82,7 +83,7 @@ final class Name extends BaseExpression {
   public Result apply(PathData item, int depth) throws IOException {
     String name = getPath(item).getName();
     if (!caseSensitive) {
-      name = name.toLowerCase();
+      name = StringUtils.toLowerCase(name);
     }
     if (globPattern.matches(name)) {
       return Result.PASS;

+ 4 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java

@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * A factory that will find the correct codec for a given filename.
@@ -66,10 +67,10 @@ public class CompressionCodecFactory {
     codecsByClassName.put(codec.getClass().getCanonicalName(), codec);
 
     String codecName = codec.getClass().getSimpleName();
-    codecsByName.put(codecName.toLowerCase(), codec);
+    codecsByName.put(StringUtils.toLowerCase(codecName), codec);
     if (codecName.endsWith("Codec")) {
       codecName = codecName.substring(0, codecName.length() - "Codec".length());
-      codecsByName.put(codecName.toLowerCase(), codec);
+      codecsByName.put(StringUtils.toLowerCase(codecName), codec);
     }
   }
 
@@ -246,7 +247,7 @@ public class CompressionCodecFactory {
       if (codec == null) {
         // trying to get the codec by name in case the name was specified
         // instead a class
-        codec = codecsByName.get(codecName.toLowerCase());
+        codec = codecsByName.get(StringUtils.toLowerCase(codecName));
       }
       return codec;
     }

+ 4 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java

@@ -44,6 +44,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.MetricsFilter;
 import org.apache.hadoop.metrics2.MetricsPlugin;
 import org.apache.hadoop.metrics2.filter.GlobFilter;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Metrics configuration for MetricsSystemImpl
@@ -85,12 +86,12 @@ class MetricsConfig extends SubsetConfiguration {
   private ClassLoader pluginLoader;
 
   MetricsConfig(Configuration c, String prefix) {
-    super(c, prefix.toLowerCase(Locale.US), ".");
+    super(c, StringUtils.toLowerCase(prefix), ".");
   }
 
   static MetricsConfig create(String prefix) {
-    return loadFirst(prefix, "hadoop-metrics2-"+ prefix.toLowerCase(Locale.US)
-                     +".properties", DEFAULT_FILE_NAME);
+    return loadFirst(prefix, "hadoop-metrics2-" +
+        StringUtils.toLowerCase(prefix) + ".properties", DEFAULT_FILE_NAME);
   }
 
   static MetricsConfig create(String prefix, String... fileNames) {

+ 3 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java

@@ -61,6 +61,7 @@ import org.apache.hadoop.metrics2.lib.MetricsRegistry;
 import org.apache.hadoop.metrics2.lib.MetricsSourceBuilder;
 import org.apache.hadoop.metrics2.lib.MutableStat;
 import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 
 /**
@@ -620,7 +621,7 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource {
     LOG.debug("from environment variable: "+ System.getenv(MS_INIT_MODE_KEY));
     String m = System.getProperty(MS_INIT_MODE_KEY);
     String m2 = m == null ? System.getenv(MS_INIT_MODE_KEY) : m;
-    return InitMode.valueOf((m2 == null ? InitMode.NORMAL.name() : m2)
-                            .toUpperCase(Locale.US));
+    return InitMode.valueOf(
+        StringUtils.toUpperCase((m2 == null ? InitMode.NORMAL.name() : m2)));
   }
 }

+ 7 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java

@@ -23,6 +23,7 @@ import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.Iterator;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * C Code generator front-end for Hadoop record I/O.
@@ -46,8 +47,10 @@ class CGenerator extends CodeGenerator {
     try {
       FileWriter hh = new FileWriter(name+".h");
       try {
-        hh.write("#ifndef __"+name.toUpperCase().replace('.','_')+"__\n");
-        hh.write("#define __"+name.toUpperCase().replace('.','_')+"__\n");
+        hh.write("#ifndef __"+
+            StringUtils.toUpperCase(name).replace('.','_')+"__\n");
+        hh.write("#define __"+
+            StringUtils.toUpperCase(name).replace('.','_')+"__\n");
         hh.write("#include \"recordio.h\"\n");
         for (Iterator<JFile> iter = ilist.iterator(); iter.hasNext();) {
           hh.write("#include \""+iter.next().getName()+".h\"\n");
@@ -61,7 +64,8 @@ class CGenerator extends CodeGenerator {
         }
          */
 
-        hh.write("#endif //"+name.toUpperCase().replace('.','_')+"__\n");
+        hh.write("#endif //"+
+            StringUtils.toUpperCase(name).replace('.','_')+"__\n");
       } finally {
         hh.close();
       }

+ 7 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java

@@ -23,6 +23,7 @@ import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.Iterator;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * C++ Code generator front-end for Hadoop record I/O.
@@ -49,8 +50,10 @@ class CppGenerator extends CodeGenerator {
       
       try {
         String fileName = (new File(name)).getName();
-        hh.write("#ifndef __"+fileName.toUpperCase().replace('.','_')+"__\n");
-        hh.write("#define __"+fileName.toUpperCase().replace('.','_')+"__\n");
+        hh.write("#ifndef __"+
+            StringUtils.toUpperCase(fileName).replace('.','_')+"__\n");
+        hh.write("#define __"+
+            StringUtils.toUpperCase(fileName).replace('.','_')+"__\n");
         hh.write("#include \"recordio.hh\"\n");
         hh.write("#include \"recordTypeInfo.hh\"\n");
         for (Iterator<JFile> iter = ilist.iterator(); iter.hasNext();) {
@@ -64,7 +67,8 @@ class CppGenerator extends CodeGenerator {
           iter.next().genCppCode(hh, cc, options);
         }
         
-        hh.write("#endif //"+fileName.toUpperCase().replace('.','_')+"__\n");
+        hh.write("#endif //"+
+            StringUtils.toUpperCase(fileName).replace('.','_')+"__\n");
       } finally {
         hh.close();
       }

+ 3 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java

@@ -28,6 +28,7 @@ import java.io.File;
 import java.io.FileReader;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
@@ -58,7 +59,7 @@ public class Rcc implements RccConstants {
     for (int i=0; i<args.length; i++) {
       if ("-l".equalsIgnoreCase(args[i]) ||
           "--language".equalsIgnoreCase(args[i])) {
-        language = args[i+1].toLowerCase();
+        language = StringUtils.toLowerCase(args[i+1]);
         i++;
       } else if ("-d".equalsIgnoreCase(args[i]) ||
                  "--destdir".equalsIgnoreCase(args[i])) {
@@ -69,7 +70,7 @@ public class Rcc implements RccConstants {
         if (arg.startsWith("-")) {
           arg = arg.substring(1);
         }
-        cmdargs.add(arg.toLowerCase());
+        cmdargs.add(StringUtils.toLowerCase(arg));
       } else {
         recFiles.add(args[i]);
       }

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java

@@ -66,7 +66,8 @@ public class SaslPropertiesResolver implements Configurable{
         CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION,
         QualityOfProtection.AUTHENTICATION.toString());
     for (int i=0; i < qop.length; i++) {
-      qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase(Locale.ENGLISH)).getSaslQop();
+      qop[i] = QualityOfProtection.valueOf(
+          StringUtils.toUpperCase(qop[i])).getSaslQop();
     }
     properties.put(Sasl.QOP, StringUtils.join(",", qop));
     properties.put(Sasl.SERVER_AUTH, "true");

+ 7 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java

@@ -27,7 +27,6 @@ import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 import java.util.List;
-import java.util.Locale;
 import java.util.ServiceLoader;
 
 import javax.security.auth.kerberos.KerberosPrincipal;
@@ -44,6 +43,7 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
+import org.apache.hadoop.util.StringUtils;
 
 
 //this will need to be replaced someday when there is a suitable replacement
@@ -182,7 +182,8 @@ public class SecurityUtil {
     if (fqdn == null || fqdn.isEmpty() || fqdn.equals("0.0.0.0")) {
       fqdn = getLocalHostName();
     }
-    return components[0] + "/" + fqdn.toLowerCase(Locale.US) + "@" + components[2];
+    return components[0] + "/" +
+        StringUtils.toLowerCase(fqdn) + "@" + components[2];
   }
   
   static String getLocalHostName() throws UnknownHostException {
@@ -379,7 +380,7 @@ public class SecurityUtil {
       }
       host = addr.getAddress().getHostAddress();
     } else {
-      host = addr.getHostName().toLowerCase();
+      host = StringUtils.toLowerCase(addr.getHostName());
     }
     return new Text(host + ":" + addr.getPort());
   }
@@ -606,7 +607,8 @@ public class SecurityUtil {
   public static AuthenticationMethod getAuthenticationMethod(Configuration conf) {
     String value = conf.get(HADOOP_SECURITY_AUTHENTICATION, "simple");
     try {
-      return Enum.valueOf(AuthenticationMethod.class, value.toUpperCase(Locale.ENGLISH));
+      return Enum.valueOf(AuthenticationMethod.class,
+          StringUtils.toUpperCase(value));
     } catch (IllegalArgumentException iae) {
       throw new IllegalArgumentException("Invalid attribute value for " +
           HADOOP_SECURITY_AUTHENTICATION + " of " + value);
@@ -619,7 +621,7 @@ public class SecurityUtil {
       authenticationMethod = AuthenticationMethod.SIMPLE;
     }
     conf.set(HADOOP_SECURITY_AUTHENTICATION,
-             authenticationMethod.toString().toLowerCase(Locale.ENGLISH));
+        StringUtils.toLowerCase(authenticationMethod.toString()));
   }
 
   /*

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java

@@ -138,7 +138,8 @@ public class WhitelistBasedResolver extends SaslPropertiesResolver {
         QualityOfProtection.PRIVACY.toString());
 
     for (int i=0; i < qop.length; i++) {
-      qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase()).getSaslQop();
+      qop[i] = QualityOfProtection.valueOf(
+          StringUtils.toUpperCase(qop[i])).getSaslQop();
     }
 
     saslProps.put(Sasl.QOP, StringUtils.join(",", qop));

+ 3 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java

@@ -23,6 +23,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.net.ssl.KeyManager;
 import javax.net.ssl.KeyManagerFactory;
@@ -94,7 +95,8 @@ public class FileBasedKeyStoresFactory implements KeyStoresFactory {
   @VisibleForTesting
   public static String resolvePropertyName(SSLFactory.Mode mode,
                                            String template) {
-    return MessageFormat.format(template, mode.toString().toLowerCase());
+    return MessageFormat.format(
+        template, StringUtils.toLowerCase(mode.toString()));
   }
 
   /**

+ 3 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java

@@ -22,6 +22,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
 import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import javax.net.ssl.HostnameVerifier;
@@ -137,8 +138,8 @@ public class SSLFactory implements ConnectionConfigurator {
 
   private HostnameVerifier getHostnameVerifier(Configuration conf)
       throws GeneralSecurityException, IOException {
-    return getHostnameVerifier(conf.get(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").
-        trim().toUpperCase());
+    return getHostnameVerifier(StringUtils.toUpperCase(
+        conf.get(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").trim()));
   }
 
   public static HostnameVerifier getHostnameVerifier(String verifier)

+ 6 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java

@@ -52,6 +52,7 @@ import javax.net.ssl.SSLSocket;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  ************************************************************************
@@ -365,7 +366,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
             buf.append('<');
             for (int i = 0; i < hosts.length; i++) {
                 String h = hosts[i];
-                h = h != null ? h.trim().toLowerCase() : "";
+                h = h != null ? StringUtils.toLowerCase(h.trim()) : "";
                 hosts[i] = h;
                 if (i > 0) {
                     buf.append('/');
@@ -406,7 +407,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
             out:
             for (Iterator<String> it = names.iterator(); it.hasNext();) {
                 // Don't trim the CN, though!
-                final String cn = it.next().toLowerCase();
+                final String cn = StringUtils.toLowerCase(it.next());
                 // Store CN in StringBuffer in case we need to report an error.
                 buf.append(" <");
                 buf.append(cn);
@@ -424,7 +425,8 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
                                      acceptableCountryWildcard(cn);
 
                 for (int i = 0; i < hosts.length; i++) {
-                    final String hostName = hosts[i].trim().toLowerCase();
+                    final String hostName =
+                        StringUtils.toLowerCase(hosts[i].trim());
                     if (doWildcard) {
                         match = hostName.endsWith(cn.substring(1));
                         if (match && strictWithSubDomains) {
@@ -479,7 +481,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
         }
 
         public static boolean isLocalhost(String host) {
-            host = host != null ? host.trim().toLowerCase() : "";
+            host = host != null ? StringUtils.toLowerCase(host.trim()) : "";
             if (host.startsWith("::1")) {
                 int x = host.lastIndexOf('%');
                 if (x >= 0) {

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java

@@ -47,6 +47,7 @@ import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.map.ObjectMapper;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -169,7 +170,7 @@ public abstract class DelegationTokenAuthenticationHandler
     boolean requestContinues = true;
     String op = ServletUtils.getParameter(request,
         KerberosDelegationTokenAuthenticator.OP_PARAM);
-    op = (op != null) ? op.toUpperCase() : null;
+    op = (op != null) ? StringUtils.toUpperCase(op) : null;
     if (DELEGATION_TOKEN_OPS.contains(op) &&
         !request.getMethod().equals("OPTIONS")) {
       KerberosDelegationTokenAuthenticator.DelegationTokenOperation dtOp =

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java

@@ -27,6 +27,7 @@ import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -286,7 +287,7 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
     HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
     if (hasResponse) {
       String contentType = conn.getHeaderField(CONTENT_TYPE);
-      contentType = (contentType != null) ? contentType.toLowerCase()
+      contentType = (contentType != null) ? StringUtils.toLowerCase(contentType)
                                           : null;
       if (contentType != null &&
           contentType.contains(APPLICATION_JSON_MIME)) {

+ 1 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java

@@ -37,7 +37,6 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 import java.util.ListIterator;
-import java.util.Locale;
 import java.util.Properties;
 import java.util.Stack;
 
@@ -363,7 +362,7 @@ public class ComparableVersion
 
         items = new ListItem();
 
-        version = version.toLowerCase( Locale.ENGLISH );
+        version = StringUtils.toLowerCase(version);
 
         ListItem list = items;
 

+ 39 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.util;
 
+import com.google.common.base.Preconditions;
 import java.io.PrintWriter;
 import java.io.StringWriter;
 import java.net.URI;
@@ -901,7 +902,7 @@ public class StringUtils {
    */
   public static String camelize(String s) {
     StringBuilder sb = new StringBuilder();
-    String[] words = split(s.toLowerCase(Locale.US), ESCAPE_CHAR, '_');
+    String[] words = split(StringUtils.toLowerCase(s), ESCAPE_CHAR, '_');
 
     for (String word : words)
       sb.append(org.apache.commons.lang.StringUtils.capitalize(word));
@@ -1032,4 +1033,41 @@ public class StringUtils {
     }
     return null;
   }
+
+  /**
+   * Converts all of the characters in this String to lower case with
+   * Locale.ENGLISH.
+   *
+   * @param str  string to be converted
+   * @return     the str, converted to lowercase.
+   */
+  public static String toLowerCase(String str) {
+    return str.toLowerCase(Locale.ENGLISH);
+  }
+
+  /**
+   * Converts all of the characters in this String to upper case with
+   * Locale.ENGLISH.
+   *
+   * @param str  string to be converted
+   * @return     the str, converted to uppercase.
+   */
+  public static String toUpperCase(String str) {
+    return str.toUpperCase(Locale.ENGLISH);
+  }
+
+  /**
+   * Compare strings in a locale-free way using String#equalsIgnoreCase.
+   *
+   * @param s1  Non-null string to be compared
+   * @param s2  string to be compared
+   * @return    true if the strings are equal, ignoring case
+   */
+  public static boolean equalsIgnoreCase(String s1, String s2) {
+    Preconditions.checkNotNull(s1);
+    // don't check non-null against s2 to make the semantics same as
+    // s1.equals(s2)
+    return s1.equalsIgnoreCase(s2);
+  }
+
 }
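
The three helpers above are the mechanical replacement applied at every call site in this commit: bare toLowerCase()/toUpperCase() become StringUtils.toLowerCase()/StringUtils.toUpperCase(), and lowercase-then-equals becomes equalsIgnoreCase. A minimal usage sketch mirroring the Configuration and FileSystem changes in this diff (method and variable names here are illustrative):

import java.net.URI;
import org.apache.hadoop.util.StringUtils;

public class LocaleSafeUsage {
  // Mirrors Configuration#getBoolean: compare without converting first.
  static boolean parseBoolean(String valueString, boolean defaultValue) {
    if (StringUtils.equalsIgnoreCase("true", valueString)) {
      return true;
    } else if (StringUtils.equalsIgnoreCase("false", valueString)) {
      return false;
    }
    return defaultValue;
  }

  // Mirrors FileSystem.Cache.Key: Locale.ENGLISH is applied internally,
  // so the cache key is identical under any JVM default locale.
  static String schemeKey(URI uri) {
    return uri.getScheme() == null ? "" : StringUtils.toLowerCase(uri.getScheme());
  }
}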

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java

@@ -1296,7 +1296,7 @@ public class TestIPC {
     
     StringBuilder hexString = new StringBuilder();
     
-    for (String line : hexdump.toUpperCase().split("\n")) {
+    for (String line : StringUtils.toUpperCase(hexdump).split("\n")) {
       hexString.append(line.substring(0, LAST_HEX_COL).replace(" ", ""));
     }
     return StringUtils.hexStringToByte(hexString.toString());

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java

@@ -181,7 +181,7 @@ public class TestSaslRPC {
     StringBuilder sb = new StringBuilder();
     int i = 0;
     for (QualityOfProtection qop:qops){
-     sb.append(qop.name().toLowerCase());
+     sb.append(org.apache.hadoop.util.StringUtils.toLowerCase(qop.name()));
      if (++i < qops.length){
        sb.append(",");
      }

+ 5 - 3
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java

@@ -32,6 +32,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.mockito.Mockito;
@@ -102,13 +103,14 @@ public class TestSecurityUtil {
     String realm = "@REALM";
     String principalInConf = service + SecurityUtil.HOSTNAME_PATTERN + realm;
     String hostname = "FooHost";
-    String principal = service + hostname.toLowerCase() + realm;
+    String principal =
+        service + StringUtils.toLowerCase(hostname) + realm;
     verify(principalInConf, hostname, principal);
   }
 
   @Test
   public void testLocalHostNameForNullOrWild() throws Exception {
-    String local = SecurityUtil.getLocalHostName();
+    String local = StringUtils.toLowerCase(SecurityUtil.getLocalHostName());
     assertEquals("hdfs/" + local + "@REALM",
                  SecurityUtil.getServerPrincipal("hdfs/_HOST@REALM", (String)null));
     assertEquals("hdfs/" + local + "@REALM",
@@ -259,7 +261,7 @@ public class TestSecurityUtil {
     //LOG.info("address:"+addr+" host:"+host+" ip:"+ip+" port:"+port);
 
     SecurityUtil.setTokenServiceUseIp(useIp);
-    String serviceHost = useIp ? ip : host.toLowerCase();
+    String serviceHost = useIp ? ip : StringUtils.toLowerCase(host);
     
     Token<?> token = new Token<TokenIdentifier>();
     Text service = new Text(serviceHost+":"+port);

+ 3 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java

@@ -26,6 +26,7 @@ import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.*;
 
 import javax.security.auth.Subject;
@@ -212,7 +213,7 @@ public class TestUserGroupInformation {
         userName = userName.substring(sp + 1);
       }
       // user names are case insensitive on Windows. Make consistent
-      userName = userName.toLowerCase();
+      userName = StringUtils.toLowerCase(userName);
     }
     // get the groups
     pp = Runtime.getRuntime().exec(Shell.WINDOWS ?
@@ -232,7 +233,7 @@ public class TestUserGroupInformation {
     String loginUserName = login.getShortUserName();
     if(Shell.WINDOWS) {
       // user names are case insensitive on Windows. Make consistent
-      loginUserName = loginUserName.toLowerCase();
+      loginUserName = StringUtils.toLowerCase(loginUserName);
     }
     assertEquals(userName, loginUserName);
 

+ 4 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java

@@ -29,6 +29,7 @@ import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.Map;
 
+import org.apache.hadoop.util.StringUtils;
 import org.junit.runner.notification.Failure;
 import org.junit.runner.notification.RunListener;
 
@@ -93,8 +94,9 @@ public class TimedOutTestsListener extends RunListener {
           thread.getPriority(),
           thread.getId(),
           Thread.State.WAITING.equals(thread.getState()) ? 
-              "in Object.wait()" : thread.getState().name().toLowerCase(),
-          Thread.State.WAITING.equals(thread.getState()) ? 
+              "in Object.wait()" :
+              StringUtils.toLowerCase(thread.getState().name()),
+          Thread.State.WAITING.equals(thread.getState()) ?
               "WAITING (on object monitor)" : thread.getState()));
       for (StackTraceElement stackTraceElement : e.getValue()) {
         dump.append("\n        at ");

+ 21 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java

@@ -18,10 +18,12 @@
 
 package org.apache.hadoop.util;
 
+import java.util.Locale;
 import static org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.long2String;
 import static org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.string2long;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -412,6 +414,25 @@ public class TestStringUtils extends UnitTestcaseTimeLimit {
     assertTrue(col.containsAll(Arrays.asList(new String[]{"foo","bar","baz","blah"})));
   }
 
+  @Test
+  public void testLowerAndUpperStrings() {
+    Locale defaultLocale = Locale.getDefault();
+    try {
+      Locale.setDefault(new Locale("tr", "TR"));
+      String upperStr = "TITLE";
+      String lowerStr = "title";
+      // Confirming TR locale.
+      assertNotEquals(lowerStr, upperStr.toLowerCase());
+      assertNotEquals(upperStr, lowerStr.toUpperCase());
+      // This should be true regardless of locale.
+      assertEquals(lowerStr, StringUtils.toLowerCase(upperStr));
+      assertEquals(upperStr, StringUtils.toUpperCase(lowerStr));
+      assertTrue(StringUtils.equalsIgnoreCase(upperStr, lowerStr));
+    } finally {
+      Locale.setDefault(defaultLocale);
+    }
+  }
+
   // Benchmark for StringUtils split
   public static void main(String []args) {
     final String TO_SPLIT = "foo,bar,baz,blah,blah";

+ 4 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java

@@ -382,8 +382,10 @@ public class TestWinUtils {
   private void assertOwners(File file, String expectedUser,
       String expectedGroup) throws IOException {
     String [] args = lsF(file).trim().split("[\\|]");
-    assertEquals(expectedUser.toLowerCase(), args[2].toLowerCase());
-    assertEquals(expectedGroup.toLowerCase(), args[3].toLowerCase());
+    assertEquals(StringUtils.toLowerCase(expectedUser),
+        StringUtils.toLowerCase(args[2]));
+    assertEquals(StringUtils.toLowerCase(expectedGroup),
+        StringUtils.toLowerCase(args[3]));
   }
 
   @Test (timeout = 30000)

+ 3 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java

@@ -32,6 +32,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
 import org.apache.hadoop.util.LightWeightCache;
 import org.apache.hadoop.util.LightWeightGSet;
 import org.apache.hadoop.util.LightWeightGSet.LinkedElement;
+import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.base.Preconditions;
 
@@ -359,10 +360,10 @@ public class NfsExports {
     AccessPrivilege privilege = AccessPrivilege.READ_ONLY;
     switch (parts.length) {
     case 1:
-      host = parts[0].toLowerCase().trim();
+      host = StringUtils.toLowerCase(parts[0]).trim();
       break;
     case 2:
-      host = parts[0].toLowerCase().trim();
+      host = StringUtils.toLowerCase(parts[0]).trim();
       String option = parts[1].trim();
       if ("rw".equalsIgnoreCase(option)) {
         privilege = AccessPrivilege.READ_WRITE;

+ 3 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.fs.http.server;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -82,7 +83,8 @@ public class CheckUploadContentTypeFilter implements Filter {
     String method = httpReq.getMethod();
     if (method.equals("PUT") || method.equals("POST")) {
       String op = httpReq.getParameter(HttpFSFileSystem.OP_PARAM);
-      if (op != null && UPLOAD_OPERATIONS.contains(op.toUpperCase())) {
+      if (op != null && UPLOAD_OPERATIONS.contains(
+          StringUtils.toUpperCase(op))) {
         if ("true".equalsIgnoreCase(httpReq.getParameter(HttpFSParametersProvider.DataParam.NAME))) {
           String contentType = httpReq.getContentType();
           contentTypeOK =

+ 5 - 2
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java

@@ -34,6 +34,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.protocol.AclException;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.lib.service.FileSystemAccess;
+import org.apache.hadoop.util.StringUtils;
 import org.json.simple.JSONArray;
 import org.json.simple.JSONObject;
 
@@ -439,7 +440,8 @@ public class FSOperations {
     @Override
     public JSONObject execute(FileSystem fs) throws IOException {
       boolean result = fs.truncate(path, newLength);
-      return toJSON(HttpFSFileSystem.TRUNCATE_JSON.toLowerCase(), result);
+      return toJSON(
+          StringUtils.toLowerCase(HttpFSFileSystem.TRUNCATE_JSON), result);
     }
 
   }
@@ -568,7 +570,8 @@ public class FSOperations {
     @Override
     public JSONObject execute(FileSystem fs) throws IOException {
       boolean deleted = fs.delete(path, recursive);
-      return toJSON(HttpFSFileSystem.DELETE_JSON.toLowerCase(), deleted);
+      return toJSON(
+          StringUtils.toLowerCase(HttpFSFileSystem.DELETE_JSON), deleted);
     }
 
   }

+ 3 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java

@@ -30,6 +30,7 @@ import org.apache.hadoop.lib.wsrs.Param;
 import org.apache.hadoop.lib.wsrs.ParametersProvider;
 import org.apache.hadoop.lib.wsrs.ShortParam;
 import org.apache.hadoop.lib.wsrs.StringParam;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.ws.rs.ext.Provider;
 import java.util.HashMap;
@@ -168,7 +169,8 @@ public class HttpFSParametersProvider extends ParametersProvider {
      */
     public OperationParam(String operation) {
       super(NAME, HttpFSFileSystem.Operation.class,
-            HttpFSFileSystem.Operation.valueOf(operation.toUpperCase()));
+            HttpFSFileSystem.Operation.valueOf(
+                StringUtils.toUpperCase(operation)));
     }
   }
 

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java

@@ -22,6 +22,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.lib.util.Check;
 import org.apache.hadoop.lib.util.ConfigurationUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.PropertyConfigurator;
 import org.slf4j.Logger;
@@ -202,7 +203,7 @@ public class Server {
    * @param config server configuration.
    */
   public Server(String name, String homeDir, String configDir, String logDir, String tempDir, Configuration config) {
-    this.name = Check.notEmpty(name, "name").trim().toLowerCase();
+    this.name = StringUtils.toLowerCase(Check.notEmpty(name, "name").trim());
     this.homeDir = Check.notEmpty(homeDir, "homeDir");
     this.configDir = Check.notEmpty(configDir, "configDir");
     this.logDir = Check.notEmpty(logDir, "logDir");

+ 4 - 2
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java

@@ -33,6 +33,7 @@ import org.apache.hadoop.lib.service.Scheduler;
 import org.apache.hadoop.lib.util.Check;
 import org.apache.hadoop.lib.util.ConfigurationUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.VersionInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -254,7 +255,7 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
   private Set<String> toLowerCase(Collection<String> collection) {
     Set<String> set = new HashSet<String>();
     for (String value : collection) {
-      set.add(value.toLowerCase());
+      set.add(StringUtils.toLowerCase(value));
     }
     return set;
   }
@@ -300,7 +301,8 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
 
   protected void validateNamenode(String namenode) throws FileSystemAccessException {
     if (nameNodeWhitelist.size() > 0 && !nameNodeWhitelist.contains("*")) {
-      if (!nameNodeWhitelist.contains(namenode.toLowerCase())) {
+      if (!nameNodeWhitelist.contains(
+          StringUtils.toLowerCase(namenode))) {
         throw new FileSystemAccessException(FileSystemAccessException.ERROR.H05, namenode, "not in whitelist");
       }
     }

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java

@@ -34,7 +34,7 @@ public abstract class EnumParam<E extends Enum<E>> extends Param<E> {
 
   @Override
   protected E parse(String str) throws Exception {
-    return Enum.valueOf(klass, str.toUpperCase());
+    return Enum.valueOf(klass, StringUtils.toUpperCase(str));
   }
 
   @Override

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java

@@ -22,6 +22,7 @@ import java.util.EnumSet;
 import java.util.Iterator;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
 
 @InterfaceAudience.Private
 public abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>> {
@@ -37,7 +38,7 @@ public abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>>
     final EnumSet<E> set = EnumSet.noneOf(klass);
     if (!str.isEmpty()) {
       for (String sub : str.split(",")) {
-        set.add(Enum.valueOf(klass, sub.trim().toUpperCase()));
+        set.add(Enum.valueOf(klass, StringUtils.toUpperCase(sub.trim())));
       }
     }
     return set;

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java

@@ -26,6 +26,7 @@ import com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable;
 import com.sun.jersey.spi.inject.Injectable;
 import com.sun.jersey.spi.inject.InjectableProvider;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MultivaluedMap;
@@ -70,7 +71,7 @@ public class ParametersProvider
     }
     Enum op;
     try {
-      op = Enum.valueOf(enumClass, str.toUpperCase());
+      op = Enum.valueOf(enumClass, StringUtils.toUpperCase(str));
     } catch (IllegalArgumentException ex) {
       throw new IllegalArgumentException(
         MessageFormat.format("Invalid Operation [{0}]", str));
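
OperationParam, EnumParam, EnumSetParam and ParametersProvider above all funnel request strings through Enum.valueOf after an uppercase. A self-contained sketch of the pattern with a hypothetical enum (names illustrative, not the real httpfs operation set):

    import java.util.Locale;

    class OperationParser {
      enum Operation { OPEN, LISTSTATUS, GETFILESTATUS }  // illustrative subset

      static Operation parseOperation(String raw) {
        // Default locale on a tr_TR JVM: "liststatus".toUpperCase() yields
        // "LİSTSTATUS" (dotted İ) and Enum.valueOf throws
        // IllegalArgumentException. Pinning Locale.ENGLISH keeps it stable.
        return Enum.valueOf(Operation.class,
            raw.trim().toUpperCase(Locale.ENGLISH));
      }
    }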

+ 12 - 7
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java

@@ -24,6 +24,7 @@ import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.fs.XAttr.NameSpace;
+import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -57,16 +58,20 @@ public class XAttrHelper {
     }
     
     NameSpace ns;
-    final String prefix = name.substring(0, prefixIndex).toLowerCase();
-    if (prefix.equals(NameSpace.USER.toString().toLowerCase())) {
+    final String prefix = name.substring(0, prefixIndex);
+    if (StringUtils.equalsIgnoreCase(prefix, NameSpace.USER.toString())) {
       ns = NameSpace.USER;
-    } else if (prefix.equals(NameSpace.TRUSTED.toString().toLowerCase())) {
+    } else if (
+        StringUtils.equalsIgnoreCase(prefix, NameSpace.TRUSTED.toString())) {
       ns = NameSpace.TRUSTED;
-    } else if (prefix.equals(NameSpace.SYSTEM.toString().toLowerCase())) {
+    } else if (
+        StringUtils.equalsIgnoreCase(prefix, NameSpace.SYSTEM.toString())) {
       ns = NameSpace.SYSTEM;
-    } else if (prefix.equals(NameSpace.SECURITY.toString().toLowerCase())) {
+    } else if (
+        StringUtils.equalsIgnoreCase(prefix, NameSpace.SECURITY.toString())) {
       ns = NameSpace.SECURITY;
-    } else if (prefix.equals(NameSpace.RAW.toString().toLowerCase())) {
+    } else if (
+        StringUtils.equalsIgnoreCase(prefix, NameSpace.RAW.toString())) {
       ns = NameSpace.RAW;
     } else {
       throw new HadoopIllegalArgumentException("An XAttr name must be " +
@@ -145,7 +150,7 @@ public class XAttrHelper {
     }
     
     String namespace = xAttr.getNameSpace().toString();
-    return namespace.toLowerCase() + "." + xAttr.getName();
+    return StringUtils.toLowerCase(namespace) + "." + xAttr.getName();
   }
 
   /**

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java

@@ -30,6 +30,7 @@ import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeLayoutVersion;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion;
+import org.apache.hadoop.util.StringUtils;
 
 /************************************
  * Some handy constants
@@ -100,7 +101,7 @@ public class HdfsConstants {
 
    /** Convert the given String to a RollingUpgradeAction. */
     public static RollingUpgradeAction fromString(String s) {
-      return MAP.get(s.toUpperCase());
+      return MAP.get(StringUtils.toUpperCase(s));
     }
   }
 

+ 3 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java

@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
+import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -131,7 +132,8 @@ public class BlockStoragePolicySuite {
   }
 
   public static String buildXAttrName() {
-    return XAttrNS.toString().toLowerCase() + "." + STORAGE_POLICY_XATTR_NAME;
+    return StringUtils.toLowerCase(XAttrNS.toString())
+        + "." + STORAGE_POLICY_XATTR_NAME;
   }
 
   public static XAttr buildXAttr(byte policyId) {

+ 3 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java

@@ -27,6 +27,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.namenode.MetaRecoveryContext;
 
 import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.StringUtils;
 
 /************************************
  * Some handy internal HDFS constants
@@ -53,7 +54,7 @@ public final class HdfsServerConstants {
 
     public String getOptionString() {
       return StartupOption.ROLLINGUPGRADE.getName() + " "
-          + name().toLowerCase();
+          + StringUtils.toLowerCase(name());
     }
 
     public boolean matches(StartupOption option) {
@@ -76,7 +77,7 @@ public final class HdfsServerConstants {
     public static String getAllOptionString() {
       final StringBuilder b = new StringBuilder("<");
       for(RollingUpgradeStartupOption opt : VALUES) {
-        b.append(opt.name().toLowerCase()).append("|");
+        b.append(StringUtils.toLowerCase(opt.name())).append("|");
       }
       b.setCharAt(b.length() - 1, '>');
       return b.toString();

+ 3 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java

@@ -28,6 +28,7 @@ import java.util.regex.Matcher;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.server.common.Util;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Encapsulates the URI and storage medium that together describe a
@@ -88,7 +89,8 @@ public class StorageLocation {
       String classString = matcher.group(1);
       location = matcher.group(2);
       if (!classString.isEmpty()) {
-        storageType = StorageType.valueOf(classString.toUpperCase());
+        storageType =
+            StorageType.valueOf(StringUtils.toUpperCase(classString));
       }
     }
 

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java

@@ -123,6 +123,7 @@ import org.apache.hadoop.ipc.ClientId;
 import org.apache.hadoop.ipc.RpcConstants;
 import org.apache.hadoop.security.token.delegation.DelegationKey;
 import org.apache.hadoop.util.DataChecksum;
+import org.apache.hadoop.util.StringUtils;
 import org.xml.sax.ContentHandler;
 import org.xml.sax.SAXException;
 import org.xml.sax.helpers.AttributesImpl;
@@ -4350,7 +4351,7 @@ public abstract class FSEditLogOp {
 
     public RollingUpgradeOp(FSEditLogOpCodes code, String name) {
       super(code);
-      this.name = name.toUpperCase();
+      this.name = StringUtils.toUpperCase(name);
     }
 
     static RollingUpgradeOp getStartInstance(OpInstanceCache cache) {

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java

@@ -19,7 +19,7 @@ package org.apache.hadoop.hdfs.server.namenode;
 
 import com.google.common.base.Objects;
 import org.apache.hadoop.fs.StorageType;
-import java.util.Locale;
+import org.apache.hadoop.util.StringUtils;
 
  public class QuotaByStorageTypeEntry {
    private StorageType type;
@@ -54,7 +54,7 @@ import java.util.Locale;
    public String toString() {
      StringBuilder sb = new StringBuilder();
      assert (type != null);
-     sb.append(type.toString().toLowerCase());
+     sb.append(StringUtils.toLowerCase(type.toString()));
      sb.append(':');
      sb.append(quota);
      return sb.toString();

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java

@@ -587,7 +587,7 @@ public class SecondaryNameNode implements Runnable,
       return 0;
     }
     
-    String cmd = opts.getCommand().toString().toLowerCase();
+    String cmd = StringUtils.toLowerCase(opts.getCommand().toString());
     
     int exitCode = 0;
     try {

+ 9 - 8
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java

@@ -34,6 +34,7 @@ import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -79,19 +80,19 @@ public class GetConf extends Configured implements Tool {
     private static final Map<String, CommandHandler> map;
     static  {
       map = new HashMap<String, CommandHandler>();
-      map.put(NAMENODE.getName().toLowerCase(), 
+      map.put(StringUtils.toLowerCase(NAMENODE.getName()),
           new NameNodesCommandHandler());
-      map.put(SECONDARY.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(SECONDARY.getName()),
           new SecondaryNameNodesCommandHandler());
-      map.put(BACKUP.getName().toLowerCase(), 
+      map.put(StringUtils.toLowerCase(BACKUP.getName()),
           new BackupNodesCommandHandler());
-      map.put(INCLUDE_FILE.getName().toLowerCase(), 
+      map.put(StringUtils.toLowerCase(INCLUDE_FILE.getName()),
           new CommandHandler(DFSConfigKeys.DFS_HOSTS));
-      map.put(EXCLUDE_FILE.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(EXCLUDE_FILE.getName()),
           new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE));
-      map.put(NNRPCADDRESSES.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(NNRPCADDRESSES.getName()),
           new NNRpcAddressesCommandHandler());
-      map.put(CONFKEY.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(CONFKEY.getName()),
           new PrintConfKeyCommandHandler());
     }
     
@@ -116,7 +117,7 @@ public class GetConf extends Configured implements Tool {
     }
     
     public static CommandHandler getHandler(String cmd) {
-      return map.get(cmd.toLowerCase());
+      return map.get(StringUtils.toLowerCase(cmd));
     }
   }
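
GetConf keys its handler map by lowercased command name, so the static-initializer put side and the getHandler lookup side must normalize identically. A sketch of the lookup failure the pinned locale prevents (flag spelling illustrative, based on INCLUDE_FILE above):

    Map<String, CommandHandler> map = new HashMap<String, CommandHandler>();
    map.put("-includefile", new CommandHandler(DFSConfigKeys.DFS_HOSTS));

    // Default locale on tr_TR: "-INCLUDEFILE".toLowerCase() gives
    // "-ıncludefıle" (dotless ı) and misses the key.
    map.get("-INCLUDEFILE".toLowerCase());             // null on tr_TR
    map.get(StringUtils.toLowerCase("-INCLUDEFILE"));  // finds the handler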
   

+ 4 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java

@@ -24,6 +24,7 @@ import java.io.OutputStream;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * EditsVisitorFactory for different implementations of EditsVisitor
@@ -43,7 +44,7 @@ public class OfflineEditsVisitorFactory {
    */
   static public OfflineEditsVisitor getEditsVisitor(String filename,
     String processor, boolean printToScreen) throws IOException {
-    if(processor.toLowerCase().equals("binary")) {
+    if(StringUtils.equalsIgnoreCase("binary", processor)) {
       return new BinaryEditsVisitor(filename);
     }
     OfflineEditsVisitor vis;
@@ -59,9 +60,9 @@ public class OfflineEditsVisitorFactory {
         outs[1] = System.out;
         out = new TeeOutputStream(outs);
       }
-      if(processor.toLowerCase().equals("xml")) {
+      if(StringUtils.equalsIgnoreCase("xml", processor)) {
         vis = new XmlEditsVisitor(out);
-      } else if(processor.toLowerCase().equals("stats")) {
+      } else if(StringUtils.equalsIgnoreCase("stats", processor)) {
         vis = new StatisticsEditsVisitor(out);
       } else {
         throw new IOException("Unknown proccesor " + processor +

+ 3 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java

@@ -33,6 +33,7 @@ import io.netty.handler.codec.http.QueryStringDecoder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.util.StringUtils;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -51,6 +52,7 @@ import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
 import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.APPLICATION_JSON_UTF8;
 import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.WEBHDFS_PREFIX;
 import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.WEBHDFS_PREFIX_LENGTH;
+
 /**
  * Implement the read-only WebHDFS API for fsimage.
  */
@@ -141,7 +143,7 @@ class FSImageHandler extends SimpleChannelInboundHandler<HttpRequest> {
   private static String getOp(QueryStringDecoder decoder) {
     Map<String, List<String>> parameters = decoder.parameters();
     return parameters.containsKey("op")
-            ? parameters.get("op").get(0).toUpperCase() : null;
+        ? StringUtils.toUpperCase(parameters.get("op").get(0)) : null;
   }
 
   private static String getPath(QueryStringDecoder decoder)

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java

@@ -39,6 +39,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
 import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Subclass of {@link AuthenticationFilter} that
@@ -96,7 +97,7 @@ public class AuthFilter extends AuthenticationFilter {
 
     final Map<String, List<String>> m = new HashMap<String, List<String>>();
     for(Map.Entry<String, String[]> entry : original.entrySet()) {
-      final String key = entry.getKey().toLowerCase();
+      final String key = StringUtils.toLowerCase(entry.getKey());
       List<String> strings = m.get(key);
       if (strings == null) {
         strings = new ArrayList<String>();

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java

@@ -28,6 +28,7 @@ import com.sun.jersey.spi.container.ContainerRequest;
 import com.sun.jersey.spi.container.ContainerRequestFilter;
 import com.sun.jersey.spi.container.ContainerResponseFilter;
 import com.sun.jersey.spi.container.ResourceFilter;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * A filter to change parameter names to lower cases
@@ -75,7 +76,7 @@ public class ParamFilter implements ResourceFilter {
       final MultivaluedMap<String, String> parameters) {
     UriBuilder b = UriBuilder.fromUri(uri).replaceQuery("");
     for(Map.Entry<String, List<String>> e : parameters.entrySet()) {
-      final String key = e.getKey().toLowerCase();
+      final String key = StringUtils.toLowerCase(e.getKey());
       for(String v : e.getValue()) {
         b = b.queryParam(key, v);
       }

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java

@@ -1243,7 +1243,7 @@ public class WebHdfsFileSystem extends FileSystem
     if (query == null) {
       return url;
     }
-    final String lower = query.toLowerCase();
+    final String lower = StringUtils.toLowerCase(query);
     if (!lower.startsWith(OFFSET_PARAM_PREFIX)
         && !lower.contains("&" + OFFSET_PARAM_PREFIX)) {
       return url;
@@ -1254,7 +1254,7 @@ public class WebHdfsFileSystem extends FileSystem
     for(final StringTokenizer st = new StringTokenizer(query, "&");
         st.hasMoreTokens();) {
       final String token = st.nextToken();
-      if (!token.toLowerCase().startsWith(OFFSET_PARAM_PREFIX)) {
+      if (!StringUtils.toLowerCase(token).startsWith(OFFSET_PARAM_PREFIX)) {
         if (b == null) {
           b = new StringBuilder("?").append(token);
         } else {

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.web.resources;
 
 import java.util.Arrays;
+import org.apache.hadoop.util.StringUtils;
 
 abstract class EnumParam<E extends Enum<E>> extends Param<E, EnumParam.Domain<E>> {
   EnumParam(final Domain<E> domain, final E value) {
@@ -40,7 +41,7 @@ abstract class EnumParam<E extends Enum<E>> extends Param<E, EnumParam.Domain<E>
 
     @Override
     final E parse(final String str) {
-      return Enum.valueOf(enumClass, str.toUpperCase());
+      return Enum.valueOf(enumClass, StringUtils.toUpperCase(str));
     }
   }
 }

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.web.resources;
 import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.Iterator;
+import org.apache.hadoop.util.StringUtils;
 
 abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>, EnumSetParam.Domain<E>> {
   /** Convert an EnumSet to a string of comma separated values. */
@@ -82,7 +83,7 @@ abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>, EnumSet
           i = j > 0 ? j + 1 : 0;
           j = str.indexOf(',', i);
           final String sub = j >= 0? str.substring(i, j): str.substring(i);
-          set.add(Enum.valueOf(enumClass, sub.trim().toUpperCase()));
+          set.add(Enum.valueOf(enumClass, StringUtils.toUpperCase(sub.trim())));
         }
       }
       return set;

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java

@@ -19,7 +19,6 @@
 package org.apache.hadoop.hdfs.server.namenode.snapshot;
 
 import static org.mockito.Matchers.anyObject;
-import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.spy;
@@ -31,6 +30,7 @@ import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
 import org.apache.hadoop.hdfs.server.namenode.INode;
 import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
 import org.apache.hadoop.hdfs.server.namenode.INodesInPath;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -70,7 +70,7 @@ public class TestSnapshotManager {
       Assert.fail("Expected SnapshotException not thrown");
     } catch (SnapshotException se) {
       Assert.assertTrue(
-          se.getMessage().toLowerCase().contains("rollover"));
+          StringUtils.toLowerCase(se.getMessage()).contains("rollover"));
     }
 
     // Delete a snapshot to free up a slot.
@@ -86,7 +86,7 @@ public class TestSnapshotManager {
       Assert.fail("Expected SnapshotException not thrown");
     } catch (SnapshotException se) {
       Assert.assertTrue(
-          se.getMessage().toLowerCase().contains("rollover"));
+          StringUtils.toLowerCase(se.getMessage()).contains("rollover"));
     }
   }
 }

+ 2 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java

@@ -59,6 +59,7 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
 import org.apache.hadoop.yarn.client.api.TimelineClient;
@@ -711,7 +712,7 @@ public class JobHistoryEventHandler extends AbstractService
   private void processEventForTimelineServer(HistoryEvent event, JobId jobId,
           long timestamp) {
     TimelineEvent tEvent = new TimelineEvent();
-    tEvent.setEventType(event.getEventType().name().toUpperCase());
+    tEvent.setEventType(StringUtils.toUpperCase(event.getEventType().name()));
     tEvent.setTimestamp(timestamp);
     TimelineEntity tEntity = new TimelineEntity();
 

+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java

@@ -22,7 +22,6 @@ import static org.apache.hadoop.yarn.util.StringHelper.join;
 
 import java.io.IOException;
 import java.net.URLDecoder;
-import java.util.Locale;
 
 import javax.servlet.http.HttpServletResponse;
 
@@ -226,8 +225,9 @@ public class AppController extends Controller implements AMParams {
     if (app.getJob() != null) {
       try {
         String tt = $(TASK_TYPE);
-        tt = tt.isEmpty() ? "All" : StringUtils.capitalize(MRApps.taskType(tt).
-            toString().toLowerCase(Locale.US));
+        tt = tt.isEmpty() ? "All" : StringUtils.capitalize(
+            org.apache.hadoop.util.StringUtils.toLowerCase(
+                MRApps.taskType(tt).toString()));
         setTitle(join(tt, " Tasks for ", $(JOB_ID)));
       } catch (Exception e) {
         LOG.error("Failed to render tasks page with task type : "

+ 2 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java

@@ -41,6 +41,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
@@ -314,7 +315,7 @@ public class TypeConverter {
       QueueState state) {
     org.apache.hadoop.mapreduce.QueueState qState =
       org.apache.hadoop.mapreduce.QueueState.getState(
-        state.toString().toLowerCase());
+          StringUtils.toLowerCase(state.toString()));
     return qState;
   }
 

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java

@@ -303,7 +303,7 @@ public class MRApps extends Apps {
               remoteFS.getWorkingDirectory()));
           String name = (null == u.getFragment())
               ? p.getName() : u.getFragment();
-          if (!name.toLowerCase().endsWith(".jar")) {
+          if (!StringUtils.toLowerCase(name).endsWith(".jar")) {
             linkLookup.put(p, name);
           }
         }
@@ -317,7 +317,7 @@ public class MRApps extends Apps {
         if (name == null) {
           name = p.getName();
         }
-        if(!name.toLowerCase().endsWith(".jar")) {
+        if(!StringUtils.toLowerCase(name).endsWith(".jar")) {
           MRApps.addToEnvironment(
               environment,
               classpathEnvVar,

+ 4 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.mapreduce;
 
+import org.apache.hadoop.util.StringUtils;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
@@ -151,9 +152,10 @@ public class TestTypeConverter {
         .newRecord(org.apache.hadoop.yarn.api.records.QueueInfo.class);
     queueInfo.setQueueState(org.apache.hadoop.yarn.api.records.QueueState.STOPPED);
     org.apache.hadoop.mapreduce.QueueInfo returned =
-      TypeConverter.fromYarn(queueInfo, new Configuration());
+        TypeConverter.fromYarn(queueInfo, new Configuration());
     Assert.assertEquals("queueInfo translation didn't work.",
-      returned.getState().toString(), queueInfo.getQueueState().toString().toLowerCase());
+        returned.getState().toString(),
+        StringUtils.toLowerCase(queueInfo.getQueueState().toString()));
   }
 
   /**

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java

@@ -115,7 +115,7 @@ abstract public class Task implements Writable, Configurable {
    * BYTES_READ counter and second one is of the BYTES_WRITTEN counter.
    */
   protected static String[] getFileSystemCounterNames(String uriScheme) {
-    String scheme = uriScheme.toUpperCase();
+    String scheme = StringUtils.toUpperCase(uriScheme);
     return new String[]{scheme+"_BYTES_READ", scheme+"_BYTES_WRITTEN"};
   }
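
getFileSystemCounterNames derives counter names from the filesystem URI scheme, and "file" (the local filesystem) is exactly the scheme the default locale breaks. Expected values:

    getFileSystemCounterNames("hdfs");
    //   => { "HDFS_BYTES_READ", "HDFS_BYTES_WRITTEN" }
    getFileSystemCounterNames("file");
    //   => { "FILE_BYTES_READ", "FILE_BYTES_WRITTEN" }
    // With the default locale on tr_TR, "file".toUpperCase() is "FİLE",
    // yielding "FİLE_BYTES_READ" -- a counter name nothing else matches.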
   

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java

@@ -25,7 +25,6 @@ import java.util.Arrays;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ConcurrentSkipListMap;
 import java.util.Iterator;
-import java.util.Locale;
 import java.util.Map;
 
 import com.google.common.base.Joiner;
@@ -42,6 +41,7 @@ import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.FileSystemCounter;
 import org.apache.hadoop.mapreduce.util.ResourceBundles;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * An abstract class to provide common implementation of the filesystem
@@ -227,7 +227,7 @@ public abstract class FileSystemCounterGroup<C extends Counter>
   }
 
   private String checkScheme(String scheme) {
-    String fixed = scheme.toUpperCase(Locale.US);
+    String fixed = StringUtils.toUpperCase(scheme);
     String interned = schemes.putIfAbsent(fixed, fixed);
     if (schemes.size() > MAX_NUM_SCHEMES) {
       // mistakes or abuses

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java

@@ -470,7 +470,7 @@ public class DistributedCache {
         if (fragment == null) {
           return false;
         }
-        String lowerCaseFragment = fragment.toLowerCase();
+        String lowerCaseFragment = StringUtils.toLowerCase(fragment);
         if (fragments.contains(lowerCaseFragment)) {
           return false;
         }
@@ -485,7 +485,7 @@ public class DistributedCache {
         if (fragment == null) {
           return false;
         }
-        String lowerCaseFragment = fragment.toLowerCase();
+        String lowerCaseFragment = StringUtils.toLowerCase(fragment);
         if (fragments.contains(lowerCaseFragment)) {
           return false;
         }

+ 4 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java

@@ -45,6 +45,8 @@ import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.util.StringUtils;
+
 /**
  * A InputFormat that reads input data from an SQL table.
  * <p>
@@ -162,7 +164,8 @@ public class DBInputFormat<T extends DBWritable>
       this.connection = createConnection();
 
       DatabaseMetaData dbMeta = connection.getMetaData();
-      this.dbProductName = dbMeta.getDatabaseProductName().toUpperCase();
+      this.dbProductName =
+          StringUtils.toUpperCase(dbMeta.getDatabaseProductName());
     }
     catch (Exception ex) {
       throw new RuntimeException(ex);

+ 6 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java

@@ -222,12 +222,14 @@ public class CLI extends Configured implements Tool {
       taskType = argv[2];
       taskState = argv[3];
       displayTasks = true;
-      if (!taskTypes.contains(taskType.toUpperCase())) {
+      if (!taskTypes.contains(
+          org.apache.hadoop.util.StringUtils.toUpperCase(taskType))) {
         System.out.println("Error: Invalid task-type: " + taskType);
         displayUsage(cmd);
         return exitCode;
       }
-      if (!taskStates.contains(taskState.toLowerCase())) {
+      if (!taskStates.contains(
+          org.apache.hadoop.util.StringUtils.toLowerCase(taskState))) {
         System.out.println("Error: Invalid task-state: " + taskState);
         displayUsage(cmd);
         return exitCode;
@@ -588,7 +590,8 @@ public class CLI extends Configured implements Tool {
    */
   protected void displayTasks(Job job, String type, String state) 
   throws IOException, InterruptedException {
-    TaskReport[] reports = job.getTaskReports(TaskType.valueOf(type.toUpperCase()));
+    TaskReport[] reports = job.getTaskReports(TaskType.valueOf(
+        org.apache.hadoop.util.StringUtils.toUpperCase(type)));
     for (TaskReport report : reports) {
       TIPStatus status = report.getCurrentStatus();
       if ((state.equalsIgnoreCase("pending") && status ==TIPStatus.PENDING) ||

+ 6 - 6
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java

@@ -155,16 +155,16 @@ public class TestDFSIO implements Tool {
     static ByteMultiple parseString(String sMultiple) {
       if(sMultiple == null || sMultiple.isEmpty()) // MB by default
         return MB;
-      String sMU = sMultiple.toUpperCase();
-      if(B.name().toUpperCase().endsWith(sMU))
+      String sMU = StringUtils.toUpperCase(sMultiple);
+      if(StringUtils.toUpperCase(B.name()).endsWith(sMU))
         return B;
-      if(KB.name().toUpperCase().endsWith(sMU))
+      if(StringUtils.toUpperCase(KB.name()).endsWith(sMU))
         return KB;
-      if(MB.name().toUpperCase().endsWith(sMU))
+      if(StringUtils.toUpperCase(MB.name()).endsWith(sMU))
         return MB;
-      if(GB.name().toUpperCase().endsWith(sMU))
+      if(StringUtils.toUpperCase(GB.name()).endsWith(sMU))
         return GB;
-      if(TB.name().toUpperCase().endsWith(sMU))
+      if(StringUtils.toUpperCase(TB.name()).endsWith(sMU))
         return TB;
       throw new IllegalArgumentException("Unsupported ByteMultiple "+sMultiple);
     }

+ 3 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java

@@ -49,6 +49,7 @@ import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.mapred.lib.LongSumReducer;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
 
 public class TestFileSystem extends TestCase {
   private static final Log LOG = FileSystem.LOG;
@@ -556,7 +557,8 @@ public class TestFileSystem extends TestCase {
   static void checkPath(MiniDFSCluster cluster, FileSystem fileSys) throws IOException {
     InetSocketAddress add = cluster.getNameNode().getNameNodeAddress();
     // Test upper/lower case
-    fileSys.checkPath(new Path("hdfs://" + add.getHostName().toUpperCase() + ":" + add.getPort()));
+    fileSys.checkPath(new Path("hdfs://"
+        + StringUtils.toUpperCase(add.getHostName()) + ":" + add.getPort()));
   }
 
   public void testFsClose() throws Exception {

+ 4 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java

@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.fs.slive;
 
+import org.apache.hadoop.util.StringUtils;
+
 /**
  * Constants used in various places in slive
  */
@@ -35,7 +37,7 @@ class Constants {
   enum Distribution {
     BEG, END, UNIFORM, MID;
     String lowerName() {
-      return this.name().toLowerCase();
+      return StringUtils.toLowerCase(this.name());
     }
   }
 
@@ -45,7 +47,7 @@ class Constants {
   enum OperationType {
     READ, APPEND, RENAME, LS, MKDIR, DELETE, CREATE, TRUNCATE;
     String lowerName() {
-      return this.name().toLowerCase();
+      return StringUtils.toLowerCase(this.name());
     }
   }
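
lowerName() turns slive enum constants into option and report tokens, and UNIFORM carries the letter the default locale mangles:

    Distribution.UNIFORM.lowerName();
    // => "uniform" with the pinned Locale.ENGLISH helper.
    // The default locale on tr_TR would give "unıform" (dotless ı),
    // which no longer round-trips against the uppercased parse in
    // OperationData below.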
 

+ 2 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.slive;
 
 import org.apache.hadoop.fs.slive.Constants.Distribution;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * This class holds the data representing what an operations distribution and
@@ -52,7 +53,7 @@ class OperationData {
       percent = (Double.parseDouble(pieces[0]) / 100.0d);
     } else if (pieces.length >= 2) {
       percent = (Double.parseDouble(pieces[0]) / 100.0d);
-      distribution = Distribution.valueOf(pieces[1].toUpperCase());
+      distribution = Distribution.valueOf(StringUtils.toUpperCase(pieces[1]));
     }
   }
 

+ 3 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.slive;
 
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * An operation output has the following object format whereby simple types are
@@ -67,7 +68,8 @@ class OperationOutput {
           "Invalid key format - no type seperator - " + TYPE_SEP);
     }
     try {
-      dataType = OutputType.valueOf(key.substring(0, place).toUpperCase());
+      dataType = OutputType.valueOf(
+          StringUtils.toUpperCase(key.substring(0, place)));
     } catch (Exception e) {
       throw new IllegalArgumentException(
           "Invalid key format - invalid output type", e);

+ 2 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java

@@ -42,6 +42,7 @@ import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TextOutputFormat;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -157,7 +158,7 @@ public class SliveTest implements Tool {
     if (val == null) {
       return false;
     }
-    String cleanupOpt = val.toLowerCase().trim();
+    String cleanupOpt = StringUtils.toLowerCase(val).trim();
     if (cleanupOpt.equals("true") || cleanupOpt.equals("1")) {
       return true;
     } else {

+ 10 - 7
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java

@@ -35,6 +35,7 @@ import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -214,23 +215,25 @@ public class FileBench extends Configured implements Tool {
           if (!(fmt == Format.txt || cod == CCodec.pln)) {
             for (CType typ : ct) {
               String fn =
-                fmt.name().toUpperCase() + "_" +
-                cod.name().toUpperCase() + "_" +
-                typ.name().toUpperCase();
+                StringUtils.toUpperCase(fmt.name()) + "_" +
+                StringUtils.toUpperCase(cod.name()) + "_" +
+                StringUtils.toUpperCase(typ.name());
               typ.configure(job);
-              System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
+              System.out.print(
+                  StringUtils.toUpperCase(rwop.name()) + " " + fn + ": ");
               System.out.println(rwop.exec(fn, job) / 1000 +
                   " seconds");
             }
           } else {
             String fn =
-              fmt.name().toUpperCase() + "_" +
-              cod.name().toUpperCase();
+              StringUtils.toUpperCase(fmt.name()) + "_" +
+              StringUtils.toUpperCase(cod.name());
             Path p = new Path(root, fn);
             if (rwop == RW.r && !fs.exists(p)) {
               fn += cod.getExt();
             }
-            System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
+            System.out.print(
+                StringUtils.toUpperCase(rwop.name()) + " " + fn + ": ");
             System.out.println(rwop.exec(fn, job) / 1000 +
                 " seconds");
           }

+ 2 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java

@@ -45,6 +45,7 @@ import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.After;
@@ -280,7 +281,7 @@ public class TestMapRed extends Configured implements Tool {
     public void map(WritableComparable key, Text value,
                     OutputCollector<Text, Text> output,
                     Reporter reporter) throws IOException {
-      String str = value.toString().toLowerCase();
+      String str = StringUtils.toLowerCase(value.toString());
       output.collect(new Text(str), value);
     }
 

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java

@@ -102,7 +102,7 @@ public class DBCountPageView extends Configured implements Tool {
   
   private void createConnection(String driverClassName
       , String url) throws Exception {
-    
+
     Class.forName(driverClassName);
     connection = DriverManager.getConnection(url);
     connection.setAutoCommit(false);

+ 3 - 1
hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java

@@ -13,6 +13,7 @@
  */
 package org.apache.hadoop.maven.plugin.versioninfo;
 
+import java.util.Locale;
 import org.apache.hadoop.maven.plugin.util.Exec;
 import org.apache.hadoop.maven.plugin.util.FileSetUtils;
 import org.apache.maven.model.FileSet;
@@ -329,7 +330,8 @@ public class VersionInfoMojo extends AbstractMojo {
       }
 
       private String normalizePath(File file) {
-        return file.getPath().toUpperCase().replaceAll("\\\\", "/");
+        return file.getPath().toUpperCase(Locale.ENGLISH)
+            .replaceAll("\\\\", "/");
       }
     });
     byte[] md5 = computeMD5(files);

+ 2 - 2
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java

@@ -984,8 +984,8 @@ public class AzureNativeFileSystemStore implements NativeFileSystemStore {
   private String verifyAndConvertToStandardFormat(String rawDir) throws URISyntaxException {
     URI asUri = new URI(rawDir);
     if (asUri.getAuthority() == null 
-        || asUri.getAuthority().toLowerCase(Locale.US).equalsIgnoreCase(
-        		sessionUri.getAuthority().toLowerCase(Locale.US))) {
+        || asUri.getAuthority().toLowerCase(Locale.ENGLISH).equalsIgnoreCase(
+      sessionUri.getAuthority().toLowerCase(Locale.ENGLISH))) {
       // Applies to me.
       return trim(asUri.getPath(), "/");
     } else {

+ 8 - 4
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java

@@ -51,6 +51,7 @@ import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
 import org.apache.hadoop.tools.mapred.UniformSizeInputFormat;
 
 import com.google.common.collect.Maps;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Utility functions used in DistCp.
@@ -121,8 +122,9 @@ public class DistCpUtils {
    */
   public static Class<? extends InputFormat> getStrategy(Configuration conf,
                                                                  DistCpOptions options) {
-    String confLabel = "distcp." +
-        options.getCopyStrategy().toLowerCase(Locale.getDefault()) + ".strategy.impl";
+    String confLabel = "distcp."
+        + StringUtils.toLowerCase(options.getCopyStrategy())
+        + ".strategy" + ".impl";
     return conf.getClass(confLabel, UniformSizeInputFormat.class, InputFormat.class);
   }
 
@@ -221,7 +223,8 @@ public class DistCpUtils {
 
     final boolean preserveXAttrs = attributes.contains(FileAttribute.XATTR);
     if (preserveXAttrs || preserveRawXattrs) {
-      final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase();
+      final String rawNS =
+          StringUtils.toLowerCase(XAttr.NameSpace.RAW.name());
       Map<String, byte[]> srcXAttrs = srcFileStatus.getXAttrs();
       Map<String, byte[]> targetXAttrs = getXAttrs(targetFS, path);
       if (srcXAttrs != null && !srcXAttrs.equals(targetXAttrs)) {
@@ -321,7 +324,8 @@ public class DistCpUtils {
          copyListingFileStatus.setXAttrs(srcXAttrs);
       } else {
         Map<String, byte[]> trgXAttrs = Maps.newHashMap();
-        final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase();
+        final String rawNS =
+            StringUtils.toLowerCase(XAttr.NameSpace.RAW.name());
         for (Map.Entry<String, byte[]> ent : srcXAttrs.entrySet()) {
           final String xattrName = ent.getKey();
           if (xattrName.startsWith(rawNS)) {
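
Two separate normalizations in this file: getStrategy derives a configuration key from the copy-strategy name, and the xattr-preservation code filters attribute names against the lowercased raw namespace. Illustrative values (the xattr name below is hypothetical):

    // getStrategy: strategy "dynamic" selects the key
    //   distcp.dynamic.strategy.impl
    String confLabel = "distcp."
        + StringUtils.toLowerCase("dynamic") + ".strategy" + ".impl";

    // xattr filtering: XAttr.NameSpace.RAW.name() is "RAW", so rawNS is "raw"
    String rawNS = StringUtils.toLowerCase(XAttr.NameSpace.RAW.name());
    boolean preserved = "raw.myattr".startsWith(rawNS);  // true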

+ 3 - 1
hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java

@@ -169,7 +169,9 @@ public class DistCpV1 implements Tool {
 
     final char symbol;
 
-    private FileAttribute() {symbol = toString().toLowerCase().charAt(0);}
+    private FileAttribute() {
+      symbol = StringUtils.toLowerCase(toString()).charAt(0);
+    }
     
     static EnumSet<FileAttribute> parse(String s) {
       if (s == null || s.length() == 0) {

+ 2 - 1
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java

@@ -25,6 +25,7 @@ import org.apache.hadoop.mapred.gridmix.Statistics.ClusterStats;
 
 import java.util.concurrent.CountDownLatch;
 import java.io.IOException;
+import org.apache.hadoop.util.StringUtils;
 
 enum GridmixJobSubmissionPolicy {
 
@@ -84,6 +85,6 @@ enum GridmixJobSubmissionPolicy {
   public static GridmixJobSubmissionPolicy getPolicy(
     Configuration conf, GridmixJobSubmissionPolicy defaultPolicy) {
     String policy = conf.get(JOB_SUBMISSION_POLICY, defaultPolicy.name());
-    return valueOf(policy.toUpperCase());
+    return valueOf(StringUtils.toUpperCase(policy));
   }
 }

+ 2 - 2
hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java

@@ -27,12 +27,12 @@ import org.apache.hadoop.fs.swift.http.RestClientBindings;
 import org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem;
 import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.Test;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
-import java.util.Locale;
 
 public class TestSwiftFileSystemExtendedContract extends SwiftFileSystemBaseTest {
 
@@ -115,7 +115,7 @@ public class TestSwiftFileSystemExtendedContract extends SwiftFileSystemBaseTest
   public void testFilesystemIsCaseSensitive() throws Exception {
     String mixedCaseFilename = "/test/UPPER.TXT";
     Path upper = path(mixedCaseFilename);
-    Path lower = path(mixedCaseFilename.toLowerCase(Locale.ENGLISH));
+    Path lower = path(StringUtils.toLowerCase(mixedCaseFilename));
     assertFalse("File exists" + upper, fs.exists(upper));
     assertFalse("File exists" + lower, fs.exists(lower));
     FSDataOutputStream out = fs.create(upper);

+ 17 - 16
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java

@@ -38,6 +38,7 @@ import java.util.regex.Pattern;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.util.LineReader;
@@ -319,42 +320,42 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
     }
 
     for (int i = 0; i < args.length - (inputFilename == null ? 0 : 1); ++i) {
-      if ("-h".equals(args[i].toLowerCase())
-          || "-help".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-h", args[i])
+          || StringUtils.equalsIgnoreCase("-help", args[i])) {
         usage();
         return 0;
       }
 
-      if ("-c".equals(args[i].toLowerCase())
-          || "-collect-prefixes".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-c", args[i])
+          || StringUtils.equalsIgnoreCase("-collect-prefixes", args[i])) {
         collecting = true;
         continue;
       }
 
       // these control the job digest
-      if ("-write-job-trace".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-write-job-trace", args[i])) {
         ++i;
         jobTraceFilename = new Path(args[i]);
         continue;
       }
 
-      if ("-single-line-job-traces".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-single-line-job-traces", args[i])) {
         prettyprintTrace = false;
         continue;
       }
 
-      if ("-omit-task-details".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-omit-task-details", args[i])) {
         omitTaskDetails = true;
         continue;
       }
 
-      if ("-write-topology".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-write-topology", args[i])) {
         ++i;
         topologyFilename = new Path(args[i]);
         continue;
       }
 
-      if ("-job-digest-spectra".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-job-digest-spectra", args[i])) {
         ArrayList<Integer> values = new ArrayList<Integer>();
 
         ++i;
@@ -384,13 +385,13 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
         continue;
       }
 
-      if ("-d".equals(args[i].toLowerCase())
-          || "-debug".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-d", args[i])
+          || StringUtils.equalsIgnoreCase("-debug", args[i])) {
         debug = true;
         continue;
       }
 
-      if ("-spreads".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-spreads", args[i])) {
         int min = Integer.parseInt(args[i + 1]);
         int max = Integer.parseInt(args[i + 2]);
 
@@ -404,22 +405,22 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
       }
 
       // These control log-wide CDF outputs
-      if ("-delays".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-delays", args[i])) {
         delays = true;
         continue;
       }
 
-      if ("-runtimes".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-runtimes", args[i])) {
         runtimes = true;
         continue;
       }
 
-      if ("-tasktimes".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-tasktimes", args[i])) {
         collectTaskTimes = true;
         continue;
       }
 
-      if ("-v1".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-v1", args[i])) {
         version = 1;
         continue;
       }
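
Besides pinning the locale, the rewrite above replaces one freshly lowercased copy of args[i] per flag test with a single comparison call (assuming the helper compares with Locale.ENGLISH semantics). The pattern, side by side:

    // Before: allocates args[i].toLowerCase() for every flag compared
    if ("-tasktimes".equals(args[i].toLowerCase())) { ... }

    // After: one locale-safe, literal-first comparison per test, so
    // "-TASKTIMES" still matches even on a tr_TR JVM, where plain
    // toLowerCase() would produce "-tasktımes".
    if (StringUtils.equalsIgnoreCase("-tasktimes", args[i])) { ... }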

+ 1 - 1
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java

@@ -433,7 +433,7 @@ public class JobBuilder {
       return Values.SUCCESS;
     }
     
-    return Values.valueOf(name.toUpperCase());
+    return Values.valueOf(StringUtils.toUpperCase(name));
   }
 
   private void processTaskUpdatedEvent(TaskUpdatedEvent event) {

+ 2 - 1
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java

@@ -28,6 +28,7 @@ import org.apache.hadoop.mapreduce.jobhistory.JhCounter;
 import org.apache.hadoop.mapreduce.jobhistory.JhCounterGroup;
 import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
 
+import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.annotate.JsonAnySetter;
 
 /**
@@ -243,7 +244,7 @@ public class LoggedTask implements DeepCompare {
   }
 
   private static String canonicalizeCounterName(String nonCanonicalName) {
-    String result = nonCanonicalName.toLowerCase();
+    String result = StringUtils.toLowerCase(nonCanonicalName);
 
     result = result.replace(' ', '|');
     result = result.replace('-', '|');

+ 2 - 1
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java

@@ -23,6 +23,7 @@ import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
 
+import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.annotate.JsonAnySetter;
 
 // HACK ALERT!!!  This "should" have two subclasses, which might be called
@@ -611,7 +612,7 @@ public class LoggedTaskAttempt implements DeepCompare {
   }
   
   private static String canonicalizeCounterName(String nonCanonicalName) {
-    String result = nonCanonicalName.toLowerCase();
+    String result = StringUtils.toLowerCase(nonCanonicalName);
 
     result = result.replace(' ', '|');
     result = result.replace('-', '|');

+ 2 - 1
hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java

@@ -25,6 +25,7 @@ import java.util.*;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * This is a class used to get the current environment
@@ -43,7 +44,7 @@ public class Environment extends Properties {
     // http://lopica.sourceforge.net/os.html
     String command = null;
     String OS = System.getProperty("os.name");
-    String lowerOs = OS.toLowerCase();
+    String lowerOs = StringUtils.toLowerCase(OS);
     if (OS.indexOf("Windows") > -1) {
       command = "cmd /C set";
     } else if (lowerOs.indexOf("ix") > -1 || lowerOs.indexOf("linux") > -1
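
OS sniffing is the textbook default-locale trap: os.name values such as "AIX" contain the capital I this whole commit is about. Sketch:

    String os = "AIX";  // e.g. System.getProperty("os.name") on AIX
    os.toLowerCase(new Locale("tr", "TR")).indexOf("ix");  // -1 ("aıx")
    StringUtils.toLowerCase(os).indexOf("ix");             //  1 ("aix")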

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java

@@ -36,6 +36,7 @@ import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -173,7 +174,7 @@ public class ApplicationCLI extends YarnCLI {
           if (types != null) {
             for (String type : types) {
               if (!type.trim().isEmpty()) {
-                appTypes.add(type.toUpperCase().trim());
+                appTypes.add(StringUtils.toUpperCase(type).trim());
               }
             }
           }
@@ -191,8 +192,8 @@ public class ApplicationCLI extends YarnCLI {
                   break;
                 }
                 try {
-                  appStates.add(YarnApplicationState.valueOf(state
-                      .toUpperCase().trim()));
+                  appStates.add(YarnApplicationState.valueOf(
+                      StringUtils.toUpperCase(state).trim()));
                 } catch (IllegalArgumentException ex) {
                   sysout.println("The application state " + state
                       + " is invalid.");

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java

@@ -111,7 +111,8 @@ public class NodeCLI extends YarnCLI {
         if (types != null) {
           for (String type : types) {
             if (!type.trim().isEmpty()) {
-              nodeStates.add(NodeState.valueOf(type.trim().toUpperCase()));
+              nodeStates.add(NodeState.valueOf(
+                  org.apache.hadoop.util.StringUtils.toUpperCase(type.trim())));
             }
           }
         }

+ 4 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java

@@ -26,6 +26,7 @@ import java.util.Set;
 import org.apache.commons.lang.math.LongRange;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
@@ -213,7 +214,7 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
     // Convert applicationTags to lower case and add
     this.applicationTags = new HashSet<String>();
     for (String tag : tags) {
-      this.applicationTags.add(tag.toLowerCase());
+      this.applicationTags.add(StringUtils.toLowerCase(tag));
     }
   }
 
@@ -258,7 +259,8 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
   public void setApplicationStates(Set<String> applicationStates) {
     EnumSet<YarnApplicationState> appStates = null;
     for (YarnApplicationState state : YarnApplicationState.values()) {
-      if (applicationStates.contains(state.name().toLowerCase())) {
+      if (applicationStates.contains(
+          StringUtils.toLowerCase(state.name()))) {
         if (appStates == null) {
           appStates = EnumSet.of(state);
         } else {
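
setApplicationStates matches caller-supplied state strings against lowercased YarnApplicationState names, several of which (FINISHED, KILLED) carry the problem letter:

    // Locale.ENGLISH: "FINISHED" => "finished", matching normalized input.
    StringUtils.toLowerCase(YarnApplicationState.FINISHED.name());
    // Default locale on tr_TR: "FINISHED".toLowerCase() => "fınıshed",
    // so a filter on "finished" would silently select nothing.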

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java

@@ -23,6 +23,7 @@ import java.util.Set;
 
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
@@ -291,7 +292,7 @@ extends ApplicationSubmissionContext {
     // Convert applicationTags to lower case and add
     this.applicationTags = new HashSet<String>();
     for (String tag : tags) {
-      this.applicationTags.add(tag.toLowerCase());
+      this.applicationTags.add(StringUtils.toLowerCase(tag));
     }
   }
 

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java

@@ -23,7 +23,6 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URISyntaxException;
 import java.security.PrivilegedExceptionAction;
-import java.util.Locale;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
@@ -47,6 +46,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.RunJar;
 import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.LocalResource;
 import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
 
@@ -272,7 +272,7 @@ public class FSDownload implements Callable<Path> {
   private long unpack(File localrsrc, File dst) throws IOException {
     switch (resource.getType()) {
     case ARCHIVE: {
-      String lowerDst = dst.getName().toLowerCase(Locale.ENGLISH);
+      String lowerDst = StringUtils.toLowerCase(dst.getName());
       if (lowerDst.endsWith(".jar")) {
         RunJar.unJar(localrsrc, dst);
       } else if (lowerDst.endsWith(".zip")) {
@@ -291,7 +291,7 @@ public class FSDownload implements Callable<Path> {
     }
     break;
     case PATTERN: {
-      String lowerDst = dst.getName().toLowerCase(Locale.ENGLISH);
+      String lowerDst = StringUtils.toLowerCase(dst.getName());
       if (lowerDst.endsWith(".jar")) {
         String p = resource.getPattern();
         RunJar.unJar(localrsrc, dst,

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java

@@ -26,7 +26,6 @@ import java.lang.annotation.Annotation;
 import java.lang.reflect.Method;
 import java.lang.reflect.ParameterizedType;
 import java.lang.reflect.Type;
-import java.util.Locale;
 import java.util.Set;
 import java.util.regex.Pattern;
 
@@ -35,6 +34,7 @@ import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.webapp.WebAppException;
 
 import org.slf4j.Logger;
@@ -241,7 +241,7 @@ public class HamletGen {
     puts(indent, "\n",
          "private <T extends _> ", retName, "<T> ", methodName,
          "_(T e, boolean inline) {\n",
-         "  return new ", retName, "<T>(\"", retName.toLowerCase(Locale.US),
+         "  return new ", retName, "<T>(\"", StringUtils.toLowerCase(retName),
          "\", e, opt(", !endTagOptional.contains(retName), ", inline, ",
          retName.equals("PRE"), ")); }");
   }
@@ -258,7 +258,7 @@ public class HamletGen {
       puts(0, ") {");
       puts(indent,
            topMode ? "" : "  closeAttrs();\n",
-           "  return ", retName.toLowerCase(Locale.US), "_(this, ",
+           "  return ", StringUtils.toLowerCase(retName), "_" + "(this, ",
            isInline(className, retName), ");\n", "}");
     } else if (params.length == 1) {
       puts(0, "String selector) {");

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java

@@ -88,7 +88,8 @@ public class RegistryUtils {
    * @return the converted username
    */
   public static String convertUsername(String username) {
-    String converted= username.toLowerCase(Locale.ENGLISH);
+    String converted =
+        org.apache.hadoop.util.StringUtils.toLowerCase(username);
     int atSymbol = converted.indexOf('@');
     if (atSymbol > 0) {
       converted = converted.substring(0, atSymbol);

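Aside from the locale pinning, this hunk also strips a Kerberos-style realm suffix after case-folding. A hypothetical standalone rendering of the logic visible in the hunk (the real method continues past the context shown, so the trailing return is assumed):

import java.util.Locale;

public class ConvertUsernameSketch {
  static String convertUsername(String username) {
    String converted = username.toLowerCase(Locale.ENGLISH);
    int atSymbol = converted.indexOf('@');
    if (atSymbol > 0) {
      converted = converted.substring(0, atSymbol);
    }
    return converted; // assumed: the diff context cuts off before the return
  }

  public static void main(String[] args) {
    // Case-folds, then drops everything from the first '@' onward:
    System.out.println(convertUsername("QuickBrown@EXAMPLE.COM")); // quickbrown
  }
}
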
+ 3 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java

@@ -31,6 +31,7 @@ import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
 import org.apache.hadoop.yarn.server.webapp.WebServices;
@@ -147,7 +148,8 @@ public class AHSWebServices extends WebServices {
     }
     Set<String> appStates = parseQueries(statesQuery, true);
     for (String appState : appStates) {
-      switch (YarnApplicationState.valueOf(appState.toUpperCase())) {
+      switch (YarnApplicationState.valueOf(
+          StringUtils.toUpperCase(appState))) {
         case FINISHED:
         case FAILED:
         case KILLED:

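Every hunk above makes the same mechanical substitution, routing case conversion through org.apache.hadoop.util.StringUtils. The helper bodies are added elsewhere in this commit, but since the removed lines spelled out Locale.ENGLISH or Locale.US explicitly, they presumably reduce to locale-pinned wrappers roughly like this sketch:

import java.util.Locale;

// Hedged sketch of the helpers the hunks switch to; names match the
// calls in the diff, bodies are inferred from the removed lines.
public final class StringUtilsSketch {
  private StringUtilsSketch() {}

  public static String toLowerCase(String str) {
    return str.toLowerCase(Locale.ENGLISH);
  }

  public static String toUpperCase(String str) {
    return str.toUpperCase(Locale.ENGLISH);
  }
}

Centralizing the locale in one helper keeps call sites short and makes it impossible to forget the Locale argument at any individual site.
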
Some files were not shown because too many files changed in this diff