HADOOP-19134. Use StringBuilder instead of StringBuffer. (#6692). Contributed by PJ Fanning

PJ Fanning, 11 months ago, commit 59dba6e1bd
100 changed files with 136 additions and 137 deletions. In the list below, the two numbers before each path are that file's additions and deletions.
  1. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
  2. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
  3. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
  4. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java
  5. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
  6. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
  7. 4 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
  8. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
  9. 1 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
  10. 1 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java
  11. 1 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
  12. 1 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java
  13. 1 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/functional/TestRemoteIterators.java
  14. 1 1
      hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
  15. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java
  16. 3 4
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
  17. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageCorruption.java
  18. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
  19. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSTrash.java
  20. 4 4
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockManager.java
  21. 1 1
      hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java
  22. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/ConfBlock.java
  23. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
  24. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java
  25. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java
  26. 2 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java
  27. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
  28. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java
  29. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
  30. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/InvalidInputException.java
  31. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MultiFileSplit.java
  32. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java
  33. 3 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
  34. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java
  35. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
  36. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobStatus.java
  37. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
  38. 2 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueHistogram.java
  39. 4 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java
  40. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
  41. 2 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileSplit.java
  42. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
  43. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/InvalidInputException.java
  44. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java
  45. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/TupleWritable.java
  46. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplit.java
  47. 5 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java
  48. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java
  49. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
  50. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
  51. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
  52. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
  53. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java
  54. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
  55. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/VerifyJobsUtils.java
  56. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/RandomTextWriterJob.java
  57. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/AccumulatingReducer.java
  58. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/JHLogAnalyzer.java
  59. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRBench.java
  60. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
  61. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
  62. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
  63. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java
  64. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
  65. 2 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java
  66. 4 4
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java
  67. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java
  68. 3 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
  69. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/JobControlTestUtils.java
  70. 3 3
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MapReduceTestUtil.java
  71. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomTextWriter.java
  72. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java
  73. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java
  74. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
  75. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomTextWriter.java
  76. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java
  77. 1 1
      hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java
  78. 2 2
      hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java
  79. 1 1
      hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/AbstractITCommitProtocol.java
  80. 1 1
      hadoop-tools/hadoop-datajoin/src/main/java/org/apache/hadoop/contrib/utils/join/JobBase.java
  81. 1 1
      hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpSync.java
  82. 1 1
      hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
  83. 1 1
      hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java
  84. 1 1
      hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapRed.java
  85. 1 1
      hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java
  86. 0 2
      hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java
  87. 1 1
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
  88. 1 1
      hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/UtilTest.java
  89. 1 1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntityGroupId.java
  90. 5 5
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/resource/PlacementConstraint.java
  91. 1 1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java
  92. 1 1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
  93. 2 2
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
  94. 1 1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
  95. 1 1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/sql/FederationQueryRunner.java
  96. 1 1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/deletion/task/DockerContainerDeletionTask.java
  97. 1 1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/privileged/PrivilegedOperationExecutor.java
  98. 1 1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkPacketTaggingHandlerImpl.java
  99. 2 2
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficControlBandwidthHandlerImpl.java
  100. 1 1
      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java
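Context for the diffs that follow: StringBuffer and StringBuilder share the same API, but StringBuffer's methods are synchronized while StringBuilder's are not. Nearly every buffer touched by this patch is method-local (or otherwise confined to a single thread), so the locking was pure overhead and the swap is source-compatible. A minimal before/after sketch of the pattern, using a hypothetical join helper rather than code from the patch:

    // Before: each append() acquires the StringBuffer's monitor.
    static String joinOld(String[] parts) {
      StringBuffer sb = new StringBuffer();
      for (String p : parts) {
        sb.append(p).append(',');
      }
      return sb.toString();
    }

    // After: identical call sites, no per-call synchronization. Safe
    // because the builder never escapes the method.
    static String joinNew(String[] parts) {
      StringBuilder sb = new StringBuilder();
      for (String p : parts) {
        sb.append(p).append(',');
      }
      return sb.toString();
    }

Both classes extend java.lang.AbstractStringBuilder and implement Appendable and CharSequence, which is why almost every hunk below is a one-word type change.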

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java

@@ -169,7 +169,7 @@ public class KeyShell extends CommandShell {
 
   @Override
   public String getCommandUsage() {
-    StringBuffer sbuf = new StringBuffer(USAGE_PREFIX + COMMANDS);
+    StringBuilder sbuf = new StringBuilder(USAGE_PREFIX + COMMANDS);
     String banner = StringUtils.repeat("=", 66);
     sbuf.append(banner + "\n");
     sbuf.append(CreateCommand.USAGE + ":\n\n" + CreateCommand.DESC + "\n");

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java

@@ -163,7 +163,7 @@ public class DF extends Shell {
   @VisibleForTesting
   protected void parseOutput() throws IOException {
     if (output.size() < 2) {
-      StringBuffer sb = new StringBuffer("Fewer lines of output than expected");
+      StringBuilder sb = new StringBuilder("Fewer lines of output than expected");
       if (output.size() > 0) {
         sb.append(": " + output.get(0));
       }

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java

@@ -1052,7 +1052,7 @@ public class FileUtil {
 
   private static void unTarUsingTar(File inFile, File untarDir,
       boolean gzipped) throws IOException {
-    StringBuffer untarCommand = new StringBuffer();
+    StringBuilder untarCommand = new StringBuilder();
     // not using canonical path here; this postpones relative path
     // resolution until bash is executed.
     final String source = "'" + FileUtil.makeSecureShellPath(inFile) + "'";

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java

@@ -58,7 +58,7 @@ public class RpcDeniedReply extends RpcReply {
 
   @Override
   public String toString() {
-    return new StringBuffer().append("xid:").append(xid)
+    return new StringBuilder().append("xid:").append(xid)
         .append(",messageType:").append(messageType).append("verifier_flavor:")
         .append(verifier.getFlavor()).append("rejectState:")
         .append(rejectState).toString();
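A side note on this hunk: for a pure builder chain inside toString(), plain concatenation compiles to equivalent code (javac emits StringBuilder appends, and JDK 9+ uses invokedynamic-based concat), so an equally valid form, sketched here rather than taken from the patch, would be:

    @Override
    public String toString() {
      return "xid:" + xid + ",messageType:" + messageType
          + "verifier_flavor:" + verifier.getFlavor()
          + "rejectState:" + rejectState;
    }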

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java

@@ -148,7 +148,7 @@ public final class ProviderUtils {
     if (providerPath == null) {
       return config;
     }
-    StringBuffer newProviderPath = new StringBuffer();
+    StringBuilder newProviderPath = new StringBuilder();
     String[] providers = providerPath.split(",");
     Path path = null;
     for (String provider: providers) {

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java

@@ -127,7 +127,7 @@ public class CredentialShell extends CommandShell {
 
   @Override
   public String getCommandUsage() {
-    StringBuffer sbuf = new StringBuffer(USAGE_PREFIX + COMMANDS);
+    StringBuilder sbuf = new StringBuilder(USAGE_PREFIX + COMMANDS);
     String banner = StringUtils.repeat("=", 66);
     sbuf.append(banner + "\n")
         .append(CreateCommand.USAGE + ":\n\n" + CreateCommand.DESC + "\n")

+ 4 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java

@@ -370,7 +370,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
                     strictWithSubDomains);
             }
             // Build up lists of allowed hosts For logging/debugging purposes.
-            StringBuffer buf = new StringBuffer(32);
+            StringBuilder buf = new StringBuilder(32);
             buf.append('<');
             for (int i = 0; i < hosts.length; i++) {
                 String h = hosts[i];
@@ -408,15 +408,15 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
                 throw new SSLException(msg);
             }
 
-            // StringBuffer for building the error message.
-            buf = new StringBuffer();
+            // StringBuilder for building the error message.
+            buf = new StringBuilder();
 
             boolean match = false;
             out:
             for (Iterator<String> it = names.iterator(); it.hasNext();) {
                 // Don't trim the CN, though!
                 final String cn = StringUtils.toLowerCase(it.next());
-                // Store CN in StringBuffer in case we need to report an error.
+                // Store CN in StringBuilder in case we need to report an error.
             buf.append(" <")
             .append(cn)
             .append('>');

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java

@@ -1014,7 +1014,7 @@ public abstract class Shell {
     BufferedReader inReader =
             new BufferedReader(new InputStreamReader(process.getInputStream(),
                 StandardCharsets.UTF_8));
-    final StringBuffer errMsg = new StringBuffer();
+    final StringBuilder errMsg = new StringBuilder();
 
     // read error and input streams as this would free up the buffers
     // free the error stream buffer
@@ -1208,7 +1208,7 @@ public abstract class Shell {
       implements CommandExecutor {
 
     private String[] command;
-    private StringBuffer output;
+    private StringBuilder output;
 
 
     public ShellCommandExecutor(String[] execString) {
@@ -1289,7 +1289,7 @@ public abstract class Shell {
 
     @Override
     protected void parseExecResult(BufferedReader lines) throws IOException {
-      output = new StringBuffer();
+      output = new StringBuilder();
       char[] buf = new char[512];
       int nRead;
       while ( (nRead = lines.read(buf, 0, buf.length)) > 0 ) {
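One caveat specific to this file: unlike the method-local buffers elsewhere in the patch, output is an instance field of ShellCommandExecutor. The change assumes thread confinement, i.e. the field is written only by the thread running the command inside parseExecResult() and read after the command completes, so StringBuffer's per-call locking was never providing real safety. A condensed sketch of that single-writer shape (class name and accessor are illustrative, not the full Hadoop class):

    import java.io.BufferedReader;
    import java.io.IOException;

    class ShellOutputSketch {
      // Confined to the thread that runs the command; nothing
      // synchronizes on the builder, and nothing needs to.
      private StringBuilder output;

      protected void parseExecResult(BufferedReader lines) throws IOException {
        output = new StringBuilder();
        char[] buf = new char[512];
        int nRead;
        while ((nRead = lines.read(buf, 0, buf.length)) > 0) {
          output.append(buf, 0, nRead);
        }
      }

      // Called after the command has finished executing.
      public String getOutput() {
        return (output == null) ? "" : output.toString();
      }
    }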

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java

@@ -1334,7 +1334,7 @@ public class StringUtils {
 
       int inputLineLength = str.length();
       int offset = 0;
-      StringBuffer wrappedLine = new StringBuffer(inputLineLength + 32);
+      StringBuilder wrappedLine = new StringBuilder(inputLineLength + 32);
 
       while(inputLineLength - offset > wrapLength) {
         if(str.charAt(offset) == 32) {

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java

@@ -580,7 +580,7 @@ public class TestCount {
     public String toString(boolean hOption,
         boolean tOption, List<StorageType> types) {
       if (tOption) {
-        StringBuffer result = new StringBuffer();
+        StringBuilder result = new StringBuilder();
         result.append(hOption ? HUMAN : BYTES);
 
         for (StorageType type : types) {

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java

@@ -114,7 +114,7 @@ public class TestCredentialProviderFactory {
   }
 
   private static char[] generatePassword(int length) {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     Random r = new Random();
     for (int i = 0; i < length; i++) {
       sb.append(chars[r.nextInt(chars.length)]);

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java

@@ -480,7 +480,7 @@ public class TestShell extends Assert {
   @Test(timeout=120000)
   public void testDestroyAllShellProcesses() throws Throwable {
     Assume.assumeFalse(WINDOWS);
-    StringBuffer sleepCommand = new StringBuffer();
+    StringBuilder sleepCommand = new StringBuilder();
     sleepCommand.append("sleep 200");
     String[] shellCmd = {"bash", "-c", sleepCommand.toString()};
     final ShellCommandExecutor shexc1 = new ShellCommandExecutor(shellCmd);

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/functional/TestRemoteIterators.java

@@ -86,7 +86,7 @@ public class TestRemoteIterators extends AbstractHadoopTestBase {
    */
   @Test
   public void testSingleton() throws Throwable {
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
     String name = "singleton";
     RemoteIterator<String> it = remoteIteratorFromSingleton(name);
     assertStringValueContains(it, "SingletonIterator");

+ 1 - 1
hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java

@@ -167,7 +167,7 @@ public class TestKMS {
       if (kmsUrl == null || kmsUrl.size() == 0) {
         return null;
       }
-      StringBuffer sb = new StringBuffer();
+      StringBuilder sb = new StringBuilder();
 
       for (int i = 0; i < kmsUrl.size(); i++) {
         sb.append(KMSClientProvider.SCHEME_NAME + "://" +

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java

@@ -195,7 +195,7 @@ public class DFSck extends Configured implements Tool {
     final String cookiePrefix = "Cookie:";
     boolean allDone = false;
     while (!allDone) {
-      final StringBuffer url = new StringBuffer(baseUrl);
+      final StringBuilder url = new StringBuilder(baseUrl);
       if (cookie > 0) {
         url.append("&startblockafter=").append(String.valueOf(cookie));
       }

+ 3 - 4
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java

@@ -32,7 +32,6 @@ import org.apache.hadoop.hdfs.util.XMLUtils.InvalidXmlException;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.OpInstanceCache;
-import org.apache.hadoop.hdfs.tools.offlineEditsViewer.OfflineEditsViewer;
 import org.apache.hadoop.hdfs.util.XMLUtils.Stanza;
 import org.xml.sax.Attributes;
 import org.xml.sax.InputSource;
@@ -57,7 +56,7 @@ class OfflineEditsXmlLoader
   private Stanza stanza;
   private Stack<Stanza> stanzaStack;
   private FSEditLogOpCodes opCode;
-  private StringBuffer cbuf;
+  private StringBuilder cbuf;
   private long nextTxId;
   private final OpInstanceCache opCache = new OpInstanceCache();
   
@@ -119,7 +118,7 @@ class OfflineEditsXmlLoader
     stanza = null;
     stanzaStack = new Stack<Stanza>();
     opCode = null;
-    cbuf = new StringBuffer();
+    cbuf = new StringBuilder();
     nextTxId = -1;
   }
   
@@ -182,7 +181,7 @@ class OfflineEditsXmlLoader
   @Override
   public void endElement (String uri, String name, String qName) {
     String str = XMLUtils.unmangleXmlString(cbuf.toString(), false).trim();
-    cbuf = new StringBuffer();
+    cbuf = new StringBuilder();
     switch (state) {
     case EXPECT_EDITS_TAG:
       throw new InvalidXmlException("expected <EDITS/>");

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageCorruption.java

@@ -85,7 +85,7 @@ public class PBImageCorruption {
   }
 
   String getType() {
-    StringBuffer s = new StringBuffer();
+    StringBuilder s = new StringBuilder();
     if (type.contains(PBImageCorruptionType.CORRUPT_NODE)) {
       s.append(PBImageCorruptionType.CORRUPT_NODE);
     }

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java

@@ -340,7 +340,7 @@ public class DFSTestUtil {
         for (int idx = 0; idx < nLevels; idx++) {
           levels[idx] = gen.nextInt(10);
         }
-        StringBuffer sb = new StringBuffer();
+        StringBuilder sb = new StringBuilder();
         for (int idx = 0; idx < nLevels; idx++) {
           sb.append(dirNames[levels[idx]]);
           sb.append("/");

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSTrash.java

@@ -180,7 +180,7 @@ public class TestHDFSTrash {
       FileSystem fileSystem, Configuration config) throws IOException {
     // generate an unique path per instance
     UUID trashId = UUID.randomUUID();
-    StringBuffer sb = new StringBuffer()
+    StringBuilder sb = new StringBuilder()
         .append(ugi.getUserName())
         .append("-")
         .append(trashId.toString());

+ 4 - 4
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockManager.java

@@ -1833,7 +1833,7 @@ public class TestBlockManager {
     DataInputStream in = new DataInputStream(fstream);
 
     BufferedReader reader = new BufferedReader(new InputStreamReader(in));
-    StringBuffer buffer = new StringBuffer();
+    StringBuilder buffer = new StringBuilder();
     String line;
     try {
       while ((line = reader.readLine()) != null) {
@@ -1861,7 +1861,7 @@ public class TestBlockManager {
     FileInputStream fstream = new FileInputStream(file);
     DataInputStream in = new DataInputStream(fstream);
     BufferedReader reader = new BufferedReader(new InputStreamReader(in));
-    StringBuffer buffer = new StringBuffer();
+    StringBuilder buffer = new StringBuilder();
     String line;
     try {
       while ((line = reader.readLine()) != null) {
@@ -1933,7 +1933,7 @@ public class TestBlockManager {
     FileInputStream fstream = new FileInputStream(file);
     DataInputStream in = new DataInputStream(fstream);
     BufferedReader reader = new BufferedReader(new InputStreamReader(in));
-    StringBuffer buffer = new StringBuffer();
+    StringBuilder buffer = new StringBuilder();
     String line;
     try {
       while ((line = reader.readLine()) != null) {
@@ -1989,7 +1989,7 @@ public class TestBlockManager {
     FileInputStream fstream = new FileInputStream(file);
     DataInputStream in = new DataInputStream(fstream);
     BufferedReader reader = new BufferedReader(new InputStreamReader(in));
-    StringBuffer buffer = new StringBuffer();
+    StringBuilder buffer = new StringBuilder();
     String line;
     try {
       while ((line = reader.readLine()) != null) {

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java

@@ -196,7 +196,7 @@ public class TestWebHdfsDataLocality {
       //For GETFILECHECKSUM, OPEN and APPEND,
       //the chosen datanode must be different with exclude nodes.
 
-      StringBuffer sb = new StringBuffer();
+      StringBuilder sb = new StringBuilder();
       for (int i = 0; i < 2; i++) {
         sb.append(locations[i].getXferAddr());
         { // test GETFILECHECKSUM

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/ConfBlock.java

@@ -83,7 +83,7 @@ public class ConfBlock extends HtmlBlock {
               __().
       tbody();
       for (ConfEntryInfo entry : info.getProperties()) {
-        StringBuffer buffer = new StringBuffer();
+        StringBuilder buffer = new StringBuilder();
         String[] sources = entry.getSource();
         //Skip the last entry, because it is always the same HDFS file, and
         // output them in reverse order so most recent is output first

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java

@@ -2080,7 +2080,7 @@ public class TestRecovery {
 
   private void validateOutput() throws IOException {
     File expectedFile = new File(new Path(outputDir, partFile).toString());
-    StringBuffer expectedOutput = new StringBuffer();
+    StringBuilder expectedOutput = new StringBuilder();
     expectedOutput.append(key1).append('\t').append(val1).append("\n");
     expectedOutput.append(val1).append("\n");
     expectedOutput.append(val2).append("\n");

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java

@@ -516,7 +516,7 @@ public class TestAMWebServicesAttempts extends JerseyTestBase {
     String expectDiag = "";
     List<String> diagnosticsList = ta.getDiagnostics();
     if (diagnosticsList != null && !diagnostics.isEmpty()) {
-      StringBuffer b = new StringBuffer();
+      StringBuilder b = new StringBuilder();
       for (String diag : diagnosticsList) {
         b.append(diag);
       }

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java

@@ -600,7 +600,7 @@ public class TestAMWebServicesJobs extends JerseyTestBase {
     String diagString = "";
     List<String> diagList = job.getDiagnostics();
     if (diagList != null && !diagList.isEmpty()) {
-      StringBuffer b = new StringBuffer();
+      StringBuilder b = new StringBuilder();
       for (String diag : diagList) {
         b.append(diag);
       }

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java

@@ -1027,8 +1027,8 @@ public class LocalJobRunner implements ClientProtocol {
     String taskId = t.getTaskID().toString();
     boolean isCleanup = t.isTaskCleanupTask();
     String user = t.getUser();
-    StringBuffer childMapredLocalDir =
-        new StringBuffer(localDirs[0] + Path.SEPARATOR
+    StringBuilder childMapredLocalDir =
+        new StringBuilder(localDirs[0] + Path.SEPARATOR
             + getLocalTaskDir(user, jobId, taskId, isCleanup));
     for (int i = 1; i < localDirs.length; i++) {
       childMapredLocalDir.append("," + localDirs[i] + Path.SEPARATOR

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java

@@ -145,7 +145,7 @@ public class MRWebAppUtil {
     InetSocketAddress address = NetUtils.createSocketAddr(
       hsAddress, getDefaultJHSWebappPort(),
       getDefaultJHSWebappURLWithoutScheme());
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     if (address.getAddress() != null &&
         (address.getAddress().isAnyLocalAddress() ||
          address.getAddress().isLoopbackAddress())) {

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java

@@ -102,7 +102,7 @@ public class TestLocalModeWithNewApis {
   static String readOutput(Path outDir, Configuration conf) 
       throws IOException {
     FileSystem fs = outDir.getFileSystem(conf);
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
 
     Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
            new Utils.OutputFileUtils.OutputFilesFilter()));

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java

@@ -470,7 +470,7 @@ public abstract class FileInputFormat<K, V> implements InputFormat<K, V> {
    */ 
   public static void setInputPaths(JobConf conf, Path... inputPaths) {
     Path path = new Path(conf.getWorkingDirectory(), inputPaths[0]);
-    StringBuffer str = new StringBuffer(StringUtils.escapeString(path.toString()));
+    StringBuilder str = new StringBuilder(StringUtils.escapeString(path.toString()));
     for(int i = 1; i < inputPaths.length;i++) {
       str.append(StringUtils.COMMA_STR);
       path = new Path(conf.getWorkingDirectory(), inputPaths[i]);

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/InvalidInputException.java

@@ -61,7 +61,7 @@ public class InvalidInputException extends IOException {
    * @return the concatenated messages from all of the problems.
    */
   public String getMessage() {
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
     Iterator<IOException> itr = problems.iterator();
     while(itr.hasNext()) {
       result.append(itr.next().getMessage());

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MultiFileSplit.java

@@ -70,7 +70,7 @@ public class MultiFileSplit extends CombineFileSplit {
 
   @Override
   public String toString() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     for(int i=0; i < getPaths().length; i++) {
       sb.append(getPath(i).toUri().getPath() + ":0+" + getLength(i));
       if (i < getPaths().length -1) {

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java

@@ -207,7 +207,7 @@ class SortedRanges implements Writable{
   }
   
   public String toString() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     Iterator<Range> it = ranges.iterator();
     while(it.hasNext()) {
       Range range = it.next();

+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java

@@ -518,8 +518,8 @@ public class TaskLog {
                                 throws IOException {
     
     String stdout = FileUtil.makeShellPath(stdoutFilename);
-    String stderr = FileUtil.makeShellPath(stderrFilename);    
-    StringBuffer mergedCmd = new StringBuffer();
+    String stderr = FileUtil.makeShellPath(stderrFilename);
+    StringBuilder mergedCmd = new StringBuilder();
     
     // Export the pid of taskJvm to env variable JVM_PID.
     // Currently pid is not used on Windows
@@ -606,7 +606,7 @@ public class TaskLog {
    */
   public static String addCommand(List<String> cmd, boolean isExecutable) 
   throws IOException {
-    StringBuffer command = new StringBuffer();
+    StringBuilder command = new StringBuilder();
     for(String s: cmd) {
     	command.append('\'');
       if (isExecutable) {

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java

@@ -96,7 +96,7 @@ public class FieldSelectionMapReduce<K, V>
       LoggerFactory.getLogger("FieldSelectionMapReduce");
 
   private String specToString() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     sb.append("fieldSeparator: ").append(fieldSeparator).append("\n");
 
     sb.append("mapOutputKeyValueSpec: ").append(mapOutputKeyValueSpec).append(

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java

@@ -476,7 +476,7 @@ public class Job extends JobContextImpl implements JobContext, AutoCloseable {
     } catch (IOException e) {
     } catch (InterruptedException ie) {
     }
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     sb.append("Job: ").append(status.getJobID()).append("\n");
     sb.append("Job File: ").append(status.getJobFile()).append("\n");
     sb.append("Job Tracking URL : ").append(status.getTrackingUrl());

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobStatus.java

@@ -636,7 +636,7 @@ public class JobStatus implements Writable, Cloneable {
   }
   
   public String toString() {
-    StringBuffer buffer = new StringBuffer();
+    StringBuilder buffer = new StringBuilder();
     buffer.append("job-id : " + jobid);
     buffer.append("uber-mode : " + isUber);
     buffer.append("map-progress : " + mapProgress);

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java

@@ -188,7 +188,7 @@ public class TaskCompletionEvent implements Writable{
     
   @Override
   public String toString(){
-    StringBuffer buf = new StringBuffer(); 
+    StringBuilder buf = new StringBuilder();
     buf.append("Task Id : "); 
     buf.append(taskId); 
     buf.append(", Status : ");  

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueHistogram.java

@@ -83,7 +83,7 @@ public class ValueHistogram implements ValueAggregator<String> {
   public String getReport() {
     long[] counts = new long[items.size()];
 
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     Iterator<Object> iter = items.values().iterator();
     int i = 0;
     while (iter.hasNext()) {
@@ -133,7 +133,7 @@ public class ValueHistogram implements ValueAggregator<String> {
    * the histogram
    */
   public String getReportDetails() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     Iterator<Entry<Object,Object>> iter = items.entrySet().iterator();
     while (iter.hasNext()) {
       Entry<Object,Object> en = iter.next();

+ 4 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java

@@ -121,10 +121,10 @@ public class FieldSelectionHelper {
       int allFieldsFrom, String separator) {
     String retv = null;
     int i = 0;
-    StringBuffer sb = null;
+    StringBuilder sb = null;
     if (fieldList != null && fieldList.size() > 0) {
       if (sb == null) {
-        sb = new StringBuffer();
+        sb = new StringBuilder();
       }
       for (Integer index : fieldList) {
         if (index < fields.length) {
@@ -135,7 +135,7 @@ public class FieldSelectionHelper {
     }
     if (allFieldsFrom >= 0) {
       if (sb == null) {
-        sb = new StringBuffer();
+        sb = new StringBuilder();
       }
       for (i = allFieldsFrom; i < fields.length; i++) {
         sb.append(fields[i]).append(separator);
@@ -168,7 +168,7 @@ public class FieldSelectionHelper {
   public static String specToString(String fieldSeparator, String keyValueSpec,
       int allValueFieldsFrom, List<Integer> keyFieldList,
       List<Integer> valueFieldList) {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     sb.append("fieldSeparator: ").append(fieldSeparator).append("\n");
 
     sb.append("keyValueSpec: ").append(keyValueSpec).append("\n");

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java

@@ -803,7 +803,7 @@ public abstract class CombineFileInputFormat<K, V>
     }
 
     public String toString() {
-      StringBuffer buf = new StringBuffer();
+      StringBuilder buf = new StringBuilder();
       buf.append("[");
       for (PathFilter f: filters) {
         buf.append(f);

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileSplit.java

@@ -175,7 +175,7 @@ public class CombineFileSplit extends InputSplit implements Writable {
   
   @Override
  public String toString() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     for (int i = 0; i < paths.length; i++) {
       if (i == 0 ) {
         sb.append("Paths:");
@@ -188,7 +188,7 @@ public class CombineFileSplit extends InputSplit implements Writable {
     }
     if (locations != null) {
       String locs = "";
-      StringBuffer locsb = new StringBuffer();
+      StringBuilder locsb = new StringBuilder();
       for (int i = 0; i < locations.length; i++) {
         locsb.append(locations[i] + ":");
       }

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java

@@ -569,7 +569,7 @@ public abstract class FileInputFormat<K, V> extends InputFormat<K, V> {
                                    Path... inputPaths) throws IOException {
     Configuration conf = job.getConfiguration();
     Path path = inputPaths[0].getFileSystem(conf).makeQualified(inputPaths[0]);
-    StringBuffer str = new StringBuffer(StringUtils.escapeString(path.toString()));
+    StringBuilder str = new StringBuilder(StringUtils.escapeString(path.toString()));
     for(int i = 1; i < inputPaths.length;i++) {
       str.append(StringUtils.COMMA_STR);
       path = inputPaths[i].getFileSystem(conf).makeQualified(inputPaths[i]);

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/InvalidInputException.java

@@ -60,7 +60,7 @@ public class InvalidInputException extends IOException {
    * @return the concatenated messages from all of the problems.
    */
   public String getMessage() {
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
     Iterator<IOException> itr = problems.iterator();
     while(itr.hasNext()) {
       result.append(itr.next().getMessage());

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java

@@ -90,7 +90,7 @@ public class ControlledJob {
 	
   @Override
   public String toString() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     sb.append("job name:\t").append(this.job.getJobName()).append("\n");
     sb.append("job id:\t").append(this.controlID).append("\n");
     sb.append("job state:\t").append(this.state).append("\n");

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/TupleWritable.java

@@ -147,7 +147,7 @@ public class TupleWritable implements Writable, Iterable<Writable> {
    * <tt>[&lt;child1&gt;,&lt;child2&gt;,...,&lt;childn&gt;]</tt>
    */
   public String toString() {
-    StringBuffer buf = new StringBuffer("[");
+    StringBuilder buf = new StringBuilder("[");
     for (int i = 0; i < values.length; ++i) {
       buf.append(has(i) ? values[i].toString() : "");
       buf.append(",");

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplit.java

@@ -123,7 +123,7 @@ public class JobSplit {
     
     @Override
     public String toString() {
-      StringBuffer buf = new StringBuffer();
+      StringBuilder buf = new StringBuilder();
       buf.append("data-size : " + inputDataLength + "\n");
       buf.append("start-offset : " + startOffset + "\n");
       buf.append("locations : " + "\n");

+ 5 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java

@@ -678,7 +678,7 @@ public class Fetcher<K, V> extends Thread {
   private URL getMapOutputURL(MapHost host, Collection<TaskAttemptID> maps
                               )  throws MalformedURLException {
     // Get the base url
-    StringBuffer url = new StringBuffer(host.getBaseUrl());
+    StringBuilder url = new StringBuilder(host.getBaseUrl());
     
     boolean first = true;
     for (TaskAttemptID mapId : maps) {
@@ -688,8 +688,10 @@ public class Fetcher<K, V> extends Thread {
       url.append(mapId);
       first = false;
     }
-   
-    LOG.debug("MapOutput URL for " + host + " -> " + url.toString());
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("MapOutput URL for " + host + " -> " + url.toString());
+    }
     return new URL(url.toString());
   }
   

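Beyond the type swap, this hunk wraps the LOG.debug call in an isDebugEnabled() guard: the message string (including url.toString()) was previously built eagerly even when debug logging was disabled. A self-contained sketch of the guard, assuming the SLF4J API that these modules use via LoggerFactory:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class DebugGuardSketch {
      private static final Logger LOG = LoggerFactory.getLogger(DebugGuardSketch.class);

      static void logMapOutputUrl(String host, StringBuilder url) {
        // Skip the concatenation entirely unless debug is enabled.
        if (LOG.isDebugEnabled()) {
          LOG.debug("MapOutput URL for " + host + " -> " + url);
        }
      }
    }

SLF4J's parameterized form, LOG.debug("MapOutput URL for {} -> {}", host, url), defers formatting the same way without an explicit guard.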
+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java

@@ -171,7 +171,7 @@ public class ShuffleSchedulerImpl<K,V> implements ShuffleScheduler<K,V> {
   }
 
   static URI getBaseURI(TaskAttemptID reduceId, String url) {
-    StringBuffer baseUrl = new StringBuffer(url);
+    StringBuilder baseUrl = new StringBuilder(url);
     if (!url.endsWith("/")) {
       baseUrl.append("/");
     }

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java

@@ -520,7 +520,7 @@ public class CLI extends Configured implements Tool {
   }
   
   private String getJobPriorityNames() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     for (JobPriority p : JobPriority.values()) {
       // UNDEFINED_PRIORITY need not to be displayed in usage
       if (JobPriority.UNDEFINED_PRIORITY == p) {

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java

@@ -175,7 +175,7 @@ public class TestFileOutputCommitter {
   private void validateContent(Path dir) throws IOException {
     File fdir = new File(dir.toUri().getPath());
     File expectedFile = new File(fdir, partFile);
-    StringBuffer expectedOutput = new StringBuffer();
+    StringBuilder expectedOutput = new StringBuilder();
     expectedOutput.append(key1).append('\t').append(val1).append("\n");
     expectedOutput.append(val1).append("\n");
     expectedOutput.append(val2).append("\n");

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java

@@ -227,7 +227,7 @@ public class TestFileOutputCommitter {
   private void validateContent(File dir) throws IOException {
     File expectedFile = new File(dir, partFile);
     assertTrue("Could not find "+expectedFile, expectedFile.exists());
-    StringBuffer expectedOutput = new StringBuffer();
+    StringBuilder expectedOutput = new StringBuilder();
     expectedOutput.append(key1).append('\t').append(val1).append("\n");
     expectedOutput.append(val1).append("\n");
     expectedOutput.append(val2).append("\n");

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java

@@ -109,7 +109,7 @@ public class HsJobBlock extends HtmlBlock {
     // todo - switch to use JobInfo
     List<String> diagnostics = j.getDiagnostics();
     if(diagnostics != null && !diagnostics.isEmpty()) {
-      StringBuffer b = new StringBuffer();
+      StringBuilder b = new StringBuilder();
       for(String diag: diagnostics) {
         b.append(addTaskLinks(diag));
       }

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java

@@ -117,7 +117,7 @@ public class JobInfo {
       this.diagnostics = "";
       List<String> diagnostics = job.getDiagnostics();
       if (diagnostics != null && !diagnostics.isEmpty()) {
-        StringBuffer b = new StringBuffer();
+        StringBuilder b = new StringBuilder();
         for (String diag : diagnostics) {
           b.append(diag);
         }

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java

@@ -534,7 +534,7 @@ public class TestHsWebServicesAttempts extends JerseyTestBase {
     String expectDiag = "";
     List<String> diagnosticsList = ta.getDiagnostics();
     if (diagnosticsList != null && !diagnostics.isEmpty()) {
-      StringBuffer b = new StringBuffer();
+      StringBuilder b = new StringBuilder();
       for (String diag : diagnosticsList) {
         b.append(diag);
       }

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/VerifyJobsUtils.java

@@ -108,7 +108,7 @@ public class VerifyJobsUtils {
     String diagString = "";
     List<String> diagList = job.getDiagnostics();
     if (diagList != null && !diagList.isEmpty()) {
-      StringBuffer b = new StringBuffer();
+      StringBuilder b = new StringBuilder();
       for (String diag : diagList) {
         b.append(diag);
       }

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/RandomTextWriterJob.java

@@ -204,7 +204,7 @@ public class RandomTextWriterJob extends Configured implements Tool {
     }
     
     private Text generateSentence(int noWords) {
-      StringBuffer sentence = new StringBuffer();
+      StringBuilder sentence = new StringBuilder();
       String space = " ";
       for (int i=0; i < noWords; ++i) {
         sentence.append(words[random.nextInt(words.length)]);

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/AccumulatingReducer.java

@@ -73,7 +73,7 @@ public class AccumulatingReducer extends MapReduceBase
 
     // concatenate strings
     if (field.startsWith(VALUE_TYPE_STRING)) {
-      StringBuffer sSum = new StringBuffer();
+      StringBuilder sSum = new StringBuilder();
       while (values.hasNext())
         sSum.append(values.next().toString()).append(";");
       output.collect(key, new Text(sSum.toString()));

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/JHLogAnalyzer.java

@@ -773,7 +773,7 @@ public class JHLogAnalyzer {
     /**
      * Read lines until one ends with a " ." or "\" "
      */
-    private StringBuffer resBuffer = new StringBuffer();
+    private StringBuilder resBuffer = new StringBuilder();
     private String readLine(BufferedReader reader) throws IOException {
       resBuffer.setLength(0);
       reader.mark(maxJobDelimiterLineLength);

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRBench.java

@@ -132,7 +132,7 @@ public class MRBench extends Configured implements Tool{
    */
   private static String pad(long number, int length) {
     String str = String.valueOf(number);
-    StringBuffer value = new StringBuffer(); 
+    StringBuilder value = new StringBuilder();
     for (int i = str.length(); i < length; i++) {
       value.append("0"); 
     }

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java

@@ -677,7 +677,7 @@ public class TestConcatenatedCompressedInput {
   }
 
   private static String unquote(String in) {
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
     for(int i=0; i < in.length(); ++i) {
       char ch = in.charAt(i);
       if (ch == '\\') {

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java

@@ -236,7 +236,7 @@ public class TestFixedLengthInputFormat {
     }
     Writer writer = new OutputStreamWriter(ostream);
     try {
-      StringBuffer sb = new StringBuffer();
+      StringBuilder sb = new StringBuilder();
       for (int i = 0; i < numRecords; i++) {
         for (int j = 0; j < recordLen; j++) {
           sb.append(chars[charRand.nextInt(chars.length)]);

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java

@@ -105,7 +105,7 @@ public class TestMRCJCFileOutputCommitter {
     
     // validate output
     File expectedFile = new File(new Path(outDir, file).toString());
-    StringBuffer expectedOutput = new StringBuffer();
+    StringBuilder expectedOutput = new StringBuilder();
     expectedOutput.append(key1).append('\t').append(val1).append("\n");
     expectedOutput.append(val1).append("\n");
     expectedOutput.append(val2).append("\n");

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java

@@ -119,7 +119,7 @@ public class TestMapProgress {
     
     public AMFeedback statusUpdate(TaskAttemptID taskId, TaskStatus taskStatus) 
     throws IOException, InterruptedException {
-      StringBuffer buf = new StringBuffer("Task ");
+      StringBuilder buf = new StringBuilder("Task ");
       buf.append(taskId);
       if (taskStatus != null) {
         buf.append(" making progress to ");

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java

@@ -763,7 +763,7 @@ public class TestMapRed extends Configured implements Tool {
       SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, inFile,
                                                              Text.class, Text.class);
 
-      StringBuffer content = new StringBuffer();
+      StringBuilder content = new StringBuilder();
 
       for (int i = 0; i < 1000; i++) {
         content.append(i).append(": This is one more line of content\n");

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java

@@ -80,7 +80,7 @@ public class TestMiniMRClasspath {
     FileSystem fs = FileSystem.get(fileSys, conf);
     configureWordCount(fs, conf, input, numMaps, numReduces, inDir, outDir);
     JobClient.runJob(conf);
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
     {
       Path[] parents = FileUtil.stat2Paths(fs.listStatus(outDir.getParent()));
       Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
@@ -137,7 +137,7 @@ public class TestMiniMRClasspath {
     // set the tests jar file
     conf.setJarByClass(TestMiniMRClasspath.class);
     JobClient.runJob(conf);
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
     Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
                                  new Utils.OutputFileUtils
                                           .OutputFilesFilter()));

+ 4 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java

@@ -106,7 +106,7 @@ public class TestMultipleTextOutputFormat {
     File expectedFile_11 = new File(new Path(workDir, file_11).toString()); 
 
     //System.out.printf("expectedFile_11: %s\n", new Path(workDir, file_11).toString());
-    StringBuffer expectedOutput = new StringBuffer();
+    StringBuilder expectedOutput = new StringBuilder();
     for (int i = 10; i < 20; i++) {
       expectedOutput.append(""+i).append('\t').append(""+i).append("\n");
     }
@@ -118,7 +118,7 @@ public class TestMultipleTextOutputFormat {
     
     File expectedFile_12 = new File(new Path(workDir, file_12).toString()); 
     //System.out.printf("expectedFile_12: %s\n", new Path(workDir, file_12).toString());
-    expectedOutput = new StringBuffer();
+    expectedOutput = new StringBuilder();
     for (int i = 20; i < 30; i++) {
       expectedOutput.append(""+i).append('\t').append(""+i).append("\n");
     }
@@ -130,7 +130,7 @@ public class TestMultipleTextOutputFormat {
     
     File expectedFile_13 = new File(new Path(workDir, file_13).toString()); 
     //System.out.printf("expectedFile_13: %s\n", new Path(workDir, file_13).toString());
-    expectedOutput = new StringBuffer();
+    expectedOutput = new StringBuilder();
     for (int i = 30; i < 40; i++) {
       expectedOutput.append(""+i).append('\t').append(""+i).append("\n");
     }
@@ -142,7 +142,7 @@ public class TestMultipleTextOutputFormat {
     
     File expectedFile_2 = new File(new Path(workDir, file_2).toString()); 
     //System.out.printf("expectedFile_2: %s\n", new Path(workDir, file_2).toString());
-    expectedOutput = new StringBuffer();
+    expectedOutput = new StringBuilder();
     for (int i = 10; i < 40; i++) {
       expectedOutput.append(""+i).append('\t').append(""+i).append("\n");
     }
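
All four hunks in this file keep the original shape of assigning a fresh builder before each expected block. StringBuilder also offers setLength(0) for in-place reuse; the patch leaves the structure alone, which is the safer choice for a mechanical sweep. A sketch of both variants, assuming nothing beyond the JDK:

    public final class BuilderReuseSketch {
      public static void main(String[] args) {
        // As in the hunks: one fresh builder per expected block...
        StringBuilder expectedOutput = new StringBuilder();
        for (int i = 10; i < 20; i++) {
          expectedOutput.append(i).append('\t').append(i).append('\n');
        }
        // ...or a single builder cleared between blocks.
        expectedOutput.setLength(0);   // resets length; capacity is retained
        for (int i = 20; i < 30; i++) {
          expectedOutput.append(i).append('\t').append(i).append('\n');
        }
        System.out.println(expectedOutput.length());
      }
    }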

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java

@@ -548,7 +548,7 @@ public class TestTextInputFormat {
   }
   
   private static String unquote(String in) {
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
     for(int i=0; i < in.length(); ++i) {
       char ch = in.charAt(i);
       if (ch == '\\') {

+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java

@@ -91,7 +91,7 @@ public class UtilsForTests {
   }
 
   public static String formatBytes(long numBytes) {
-    StringBuffer buf = new StringBuffer();
+    StringBuilder buf = new StringBuilder();
     boolean bDetails = true;
     double num = numBytes;
 
@@ -116,7 +116,7 @@ public class UtilsForTests {
   }
 
   public static String formatBytes2(long numBytes) {
-    StringBuffer buf = new StringBuffer();
+    StringBuilder buf = new StringBuilder();
     long u = 0;
     if (numBytes >= TB) {
       u = numBytes / TB;
@@ -145,7 +145,7 @@ public class UtilsForTests {
   static final String regexpSpecials = "[]()?*+|.!^-\\~@";
 
   public static String regexpEscape(String plain) {
-    StringBuffer buf = new StringBuffer();
+    StringBuilder buf = new StringBuilder();
     char[] ch = plain.toCharArray();
     int csup = ch.length;
     for (int c = 0; c < csup; c++) {

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/JobControlTestUtils.java

@@ -82,7 +82,7 @@ public class JobControlTestUtils {
   private static String generateRandomLine() {
     long r = rand.nextLong() % 7;
     long n = r + 20;
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     for (int i = 0; i < n; i++) {
       sb.append(generateRandomWord()).append(" ");
     }

+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MapReduceTestUtil.java

@@ -97,7 +97,7 @@ public class MapReduceTestUtil {
   public static String generateRandomLine() {
     long r = rand.nextLong() % 7;
     long n = r + 20;
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     for (int i = 0; i < n; i++) {
       sb.append(generateRandomWord()).append(" ");
     }
@@ -401,7 +401,7 @@ public class MapReduceTestUtil {
   public static String readOutput(Path outDir, Configuration conf) 
       throws IOException {
     FileSystem fs = outDir.getFileSystem(conf);
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
 
     Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
            new Utils.OutputFileUtils.OutputFilesFilter()));
@@ -436,7 +436,7 @@ public class MapReduceTestUtil {
       org.apache.hadoop.mapred.TaskAttemptID taskId, boolean isCleanup)
       throws IOException {
     // string buffer to store task log
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
     int res;
 
     // reads the whole tasklog into inputstream

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomTextWriter.java

@@ -100,7 +100,7 @@ public class RandomTextWriter extends Configured implements Tool {
 
   public static String generateSentenceWithRand(ThreadLocalRandom rand,
       int noWords) {
-    StringBuffer sentence = new StringBuffer(words[rand.nextInt(words.length)]);
+    StringBuilder sentence = new StringBuilder(words[rand.nextInt(words.length)]);
     for (int i = 1; i < noWords; i++) {
       sentence.append(" ").append(words[rand.nextInt(words.length)]);
     }

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java

@@ -262,7 +262,7 @@ public class TestFixedLengthInputFormat {
     }
     Writer writer = new OutputStreamWriter(ostream);
     try {
-      StringBuffer sb = new StringBuffer();
+      StringBuilder sb = new StringBuilder();
       for (int i = 0; i < numRecords; i++) {
         for (int j = 0; j < recordLen; j++) {
           sb.append(chars[charRand.nextInt(chars.length)]);

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java

@@ -119,7 +119,7 @@ public class TestMRCJCFileOutputCommitter {
 
     // validate output
     File expectedFile = new File(new Path(outDir, partFile).toString());
-    StringBuffer expectedOutput = new StringBuffer();
+    StringBuilder expectedOutput = new StringBuilder();
     expectedOutput.append(key1).append('\t').append(val1).append("\n");
     expectedOutput.append(val1).append("\n");
     expectedOutput.append(val2).append("\n");

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java

@@ -106,7 +106,7 @@ public class MiniMRYarnCluster extends MiniYARNCluster {
               JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS,
              JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT);
    }
     address = NetUtils.getConnectAddress(address);
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     InetAddress resolved = address.getAddress();
     if (resolved == null || resolved.isAnyLocalAddress() || 
         resolved.isLoopbackAddress()) {

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomTextWriter.java

@@ -154,7 +154,7 @@ public class RandomTextWriter extends Configured implements Tool {
     }
     
     private Text generateSentence(int noWords) {
-      StringBuffer sentence = new StringBuffer();
+      StringBuilder sentence = new StringBuilder();
       String space = " ";
       for (int i=0; i < noWords; ++i) {
         sentence.append(words[random.nextInt(words.length)]);

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java

@@ -142,7 +142,7 @@ public class Pentomino {
   public static String stringifySolution(int width, int height, 
                                          List<List<ColumnName>> solution) {
     String[][] picture = new String[height][width];
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
     // for each piece placement...
     for(List<ColumnName> row: solution) {
       // go through to find which piece was placed

+ 1 - 1
hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java

@@ -66,7 +66,7 @@ public class Sudoku {
    */
   static String stringifySolution(int size, List<List<ColumnName>> solution) {
     int[][] picture = new int[size][size];
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
     // go through the rows selected in the model and build a picture of the
     // solution.
     for(List<ColumnName> row: solution) {

+ 2 - 2
hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java

@@ -47,7 +47,7 @@ class TeraScheduler {
       this.filename = filename;
     }
     public String toString() {
-      StringBuffer result = new StringBuffer();
+      StringBuilder result = new StringBuilder();
       result.append(filename);
       result.append(" on ");
       for(Host host: locations) {
@@ -64,7 +64,7 @@ class TeraScheduler {
       this.hostname = hostname;
     }
     public String toString() {
-      StringBuffer result = new StringBuffer();
+      StringBuilder result = new StringBuilder();
       result.append(splits.size());
       result.append(" ");
       result.append(hostname);

+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/AbstractITCommitProtocol.java

@@ -722,7 +722,7 @@ public abstract class AbstractITCommitProtocol extends AbstractCommitITest {
     }
     Path expectedFile = getPart0000(dir);
     log().debug("Validating content in {}", expectedFile);
-    StringBuffer expectedOutput = new StringBuffer();
+    StringBuilder expectedOutput = new StringBuilder();
     expectedOutput.append(KEY_1).append('\t').append(VAL_1).append("\n");
     expectedOutput.append(VAL_1).append("\n");
     expectedOutput.append(VAL_2).append("\n");
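
This is the same expected-output idiom as the committer tests earlier in the diff: accumulate the expectation in a local builder, then compare it with what the job wrote. A minimal, hypothetical version of that check (file contents and keys invented for illustration):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public final class ExpectedOutputSketch {
      public static void main(String[] args) throws IOException {
        Path out = Files.createTempFile("part-0000", ".txt");
        Files.writeString(out, "key1\tval1\nval1\nval2\n");

        StringBuilder expected = new StringBuilder();    // was: StringBuffer
        expected.append("key1").append('\t').append("val1").append('\n');
        expected.append("val1").append('\n');
        expected.append("val2").append('\n');

        // contentEquals compares against the builder without an extra String.
        System.out.println(Files.readString(out).contentEquals(expected));
      }
    }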

+ 1 - 1
hadoop-tools/hadoop-datajoin/src/main/java/org/apache/hadoop/contrib/utils/join/JobBase.java

@@ -143,7 +143,7 @@ public abstract class JobBase implements Mapper, Reducer {
    * 
    */
   protected String getReport() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
 
     Iterator iter = this.longCounters.entrySet().iterator();
     while (iter.hasNext()) {

+ 1 - 1
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpSync.java

@@ -614,7 +614,7 @@ class DistCpSync {
     if (sourcePath.equals(renameItem.getSource())) {
       return renameItem.getTarget();
     }
-    StringBuffer sb = new StringBuffer(sourcePath.toString());
+    StringBuilder sb = new StringBuilder(sourcePath.toString());
     String remain =
         sb.substring(renameItem.getSource().toString().length() + 1);
     return new Path(renameItem.getTarget(), remain);
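
substring(int) is declared on the common parent of both classes, so the call on the following line needed no change. A sketch of the rename translation with plain strings standing in for Path objects (values hypothetical):

    public final class RenameRemainderSketch {
      // Given prefix "/src/a" and path "/src/a/b/c", yields "b/c".
      static String remainder(String sourcePath, String renamedPrefix) {
        StringBuilder sb = new StringBuilder(sourcePath);
        return sb.substring(renamedPrefix.length() + 1);  // skip the '/'
      }

      public static void main(String[] args) {
        System.out.println(remainder("/src/a/b/c", "/src/a"));  // b/c
      }
    }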

+ 1 - 1
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java

@@ -155,7 +155,7 @@ public class DistCpUtils {
    * @return - String containing first letters of each attribute to preserve
    */
   public static String packAttributes(EnumSet<FileAttribute> attributes) {
-    StringBuffer buffer = new StringBuffer(FileAttribute.values().length);
+    StringBuilder buffer = new StringBuilder(FileAttribute.values().length);
     int len = 0;
     for (FileAttribute attribute : attributes) {
       buffer.append(attribute.name().charAt(0));
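
The int passed to the constructor here is a capacity hint, not content, and the overload exists on both classes, so pre-sizing to the number of FileAttribute values carries over unchanged. A self-contained sketch with a stand-in enum:

    import java.util.EnumSet;

    public final class PackAttributesSketch {
      enum Attr { REPLICATION, BLOCKSIZE, USER, GROUP, PERMISSION }

      static String pack(EnumSet<Attr> attributes) {
        // Capacity hint: one char per possible value, so no regrowth.
        StringBuilder buffer = new StringBuilder(Attr.values().length);
        for (Attr attribute : attributes) {
          buffer.append(attribute.name().charAt(0));
        }
        return buffer.toString();
      }

      public static void main(String[] args) {
        System.out.println(pack(EnumSet.of(Attr.USER, Attr.GROUP)));  // UG
      }
    }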

+ 1 - 1
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java

@@ -140,7 +140,7 @@ public class NodeName implements AnonymizableDataType<String> {
   }
   
   private void anonymize(StatePool pool) {
-    StringBuffer buf = new StringBuffer();
+    StringBuilder buf = new StringBuilder();
     NodeNameState state = (NodeNameState) pool.getState(getClass());
     if (state == null) {
       state = new NodeNameState();

+ 1 - 1
hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapRed.java

@@ -254,7 +254,7 @@ public abstract class PipeMapRed {
   }
 
   String safeEnvVarName(String var) {
-    StringBuffer safe = new StringBuffer();
+    StringBuilder safe = new StringBuilder();
     int len = var.length();
     for (int i = 0; i < len; i++) {
       char c = var.charAt(i);

+ 1 - 1
hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java

@@ -291,7 +291,7 @@ public class StreamJob implements Tool {
         LOG.warn("-file option is deprecated, please use generic option" +
         		" -files instead.");
 
-        StringBuffer fileList = new StringBuffer();
+        StringBuilder fileList = new StringBuilder();
         for (String file : values) {
           packageFiles_.add(file);
           try {

+ 0 - 2
hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java

@@ -23,9 +23,7 @@ import java.nio.charset.StandardCharsets;
 import java.util.regex.*;
 
 import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.mapred.Reporter;

+ 1 - 1
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java

@@ -128,7 +128,7 @@ public class TestMultipleArchiveFiles extends TestStreaming
   }
 
   protected void checkOutput() throws IOException {
-    StringBuffer output = new StringBuffer(256);
+    StringBuilder output = new StringBuilder(256);
     Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
                                             new Path(OUTPUT_DIR)));
     for (int i = 0; i < fileList.length; i++){

+ 1 - 1
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/UtilTest.java

@@ -86,7 +86,7 @@ class UtilTest {
   }
 
   public static String collate(List<String> args, String sep) {
-    StringBuffer buf = new StringBuffer();
+    StringBuilder buf = new StringBuilder();
     Iterator<String> it = args.iterator();
     while (it.hasNext()) {
       if (buf.length() > 0) {

+ 1 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntityGroupId.java

@@ -144,7 +144,7 @@ public class TimelineEntityGroupId implements
 
   public static TimelineEntityGroupId
       fromString(String timelineEntityGroupIdStr) {
-    StringBuffer buf = new StringBuffer();
+    StringBuilder buf = new StringBuilder();
     Iterator<String> it = SPLITTER.split(timelineEntityGroupIdStr).iterator();
     if (!it.next().equals(TIMELINE_ENTITY_GROUPID_STR_PREFIX)) {
       throw new IllegalArgumentException(

+ 5 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/resource/PlacementConstraint.java

@@ -413,7 +413,7 @@ public class PlacementConstraint {
 
     @Override
     public String toString() {
-      StringBuffer sb = new StringBuffer();
+      StringBuilder sb = new StringBuilder();
       if (TargetType.ALLOCATION_TAG == this.targetType) {
         // following by a comma separated tags
         sb.append(String.join(",", getTargetValues()));
@@ -643,7 +643,7 @@ public class PlacementConstraint {
 
     @Override
     public String toString() {
-      StringBuffer sb = new StringBuffer();
+      StringBuilder sb = new StringBuilder();
       sb.append("cardinality").append(",").append(getScope()).append(",");
       for (String tag : getAllocationTags()) {
         sb.append(tag).append(",");
@@ -717,7 +717,7 @@ public class PlacementConstraint {
 
     @Override
     public String toString() {
-      StringBuffer sb = new StringBuffer();
+      StringBuilder sb = new StringBuilder();
       sb.append("and(");
       Iterator<AbstractConstraint> it = getChildren().iterator();
       while (it.hasNext()) {
@@ -759,7 +759,7 @@ public class PlacementConstraint {
 
     @Override
     public String toString() {
-      StringBuffer sb = new StringBuffer();
+      StringBuilder sb = new StringBuilder();
       sb.append("or(");
       Iterator<AbstractConstraint> it = getChildren().iterator();
       while (it.hasNext()) {
@@ -805,7 +805,7 @@ public class PlacementConstraint {
 
     @Override
     public String toString() {
-      StringBuffer sb = new StringBuffer();
+      StringBuilder sb = new StringBuilder();
       sb.append("DelayedOr(");
       Iterator<TimedPlacementConstraint> it = getChildren().iterator();
       while (it.hasNext()) {
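
All five hunks in this file are toString() overrides that assemble their result in a local builder. A builder created and drained within a single call is thread-confined by construction, so the synchronization StringBuffer provided bought nothing here. A condensed sketch of the shape (hypothetical class, not the real constraint types):

    import java.util.Iterator;
    import java.util.List;

    public final class ToStringSketch {
      private final List<String> children;

      ToStringSketch(List<String> children) { this.children = children; }

      @Override
      public String toString() {
        StringBuilder sb = new StringBuilder();  // confined to this call
        sb.append("and(");
        Iterator<String> it = children.iterator();
        while (it.hasNext()) {
          sb.append(it.next());
          if (it.hasNext()) {
            sb.append(",");
          }
        }
        return sb.append(")").toString();
      }
    }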

+ 1 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java

@@ -1491,7 +1491,7 @@ public class TestLogsCLI {
   private String readContainerContent(Path containerPath,
       FileSystem fs) throws IOException {
     assertTrue(fs.exists(containerPath));
-    StringBuffer inputLine = new StringBuffer();
+    StringBuilder inputLine = new StringBuilder();
     try (BufferedReader reader = new BufferedReader(new InputStreamReader(
         fs.open(containerPath)))) {
       String tmp;

+ 1 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java

@@ -568,7 +568,7 @@ public class ProcfsBasedProcessTree extends ResourceCalculatorProcessTree {
    */
   @Override
   public String toString() {
-    StringBuffer pTree = new StringBuffer("[ ");
+    StringBuilder pTree = new StringBuilder("[ ");
     for (String p : processTree.keySet()) {
       pTree.append(p);
       pTree.append(" ");

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java

@@ -130,7 +130,7 @@ public class JQueryUI extends HtmlBlock {
         }
         // for inserting stateSaveInit
         int pos = init.indexOf('{') + 1;  
-        init = new StringBuffer(init).insert(pos, stateSaveInit).toString(); 
+        init = new StringBuilder(init).insert(pos, stateSaveInit).toString();
         list.add(join(id, "DataTable =  $('#", id, "').dataTable(", init,
                       ").fnSetFilteringDelay(188);"));
         String postInit = $(postInitID(DATATABLES, id));
@@ -146,7 +146,7 @@ public class JQueryUI extends HtmlBlock {
         init = defaultInit;
       }      
       int pos = init.indexOf('{') + 1;  
-      init = new StringBuffer(init).insert(pos, stateSaveInit).toString();  
+      init = new StringBuilder(init).insert(pos, stateSaveInit).toString();
       list.add(join("  $('", escapeEcmaScript(selector), "').dataTable(", init,
                ").fnSetFilteringDelay(288);"));      
       
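insert(int, String) is likewise shared by both classes, so splicing stateSaveInit just after the opening brace behaves exactly as before. A minimal sketch of the splice with an invented options blob:

    public final class InsertSpliceSketch {
      public static void main(String[] args) {
        String init = "{bPaginate: true}";            // hypothetical init
        String stateSaveInit = "bStateSave: true, ";
        int pos = init.indexOf('{') + 1;
        String spliced =
            new StringBuilder(init).insert(pos, stateSaveInit).toString();
        System.out.println(spliced);  // {bStateSave: true, bPaginate: true}
      }
    }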

+ 1 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java

@@ -401,7 +401,7 @@ public class TestAggregatedLogFormat {
         new BufferedReader(new FileReader(new File(remoteAppLogFile
             .toUri().getRawPath())));
     String line;
-    StringBuffer sb = new StringBuffer("");
+    StringBuilder sb = new StringBuilder("");
     while ((line = in.readLine()) != null) {
       LOG.info(line);
       sb.append(line);

+ 1 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/sql/FederationQueryRunner.java

@@ -181,7 +181,7 @@ public class FederationQueryRunner {
       causeMessage = "";
     }
 
-    StringBuffer msg = new StringBuffer(causeMessage);
+    StringBuilder msg = new StringBuilder(causeMessage);
     msg.append(" Query: ");
     msg.append(sql);
     msg.append(" Parameters: ");

+ 1 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/deletion/task/DockerContainerDeletionTask.java

@@ -65,7 +65,7 @@ public class DockerContainerDeletionTask extends DeletionTask
    */
   @Override
   public String toString() {
-    StringBuffer sb = new StringBuffer("DockerContainerDeletionTask : ");
+    StringBuilder sb = new StringBuilder("DockerContainerDeletionTask : ");
     sb.append("  id : ").append(this.getTaskId());
     sb.append("  containerId : ").append(this.containerId);
     return sb.toString().trim();

+ 1 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/privileged/PrivilegedOperationExecutor.java

@@ -281,7 +281,7 @@ public class PrivilegedOperationExecutor {
       return null;
     }
 
-    StringBuffer finalOpArg = new StringBuffer(PrivilegedOperation
+    StringBuilder finalOpArg = new StringBuilder(PrivilegedOperation
         .CGROUP_ARG_PREFIX);
     boolean noTasks = true;
 

+ 1 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkPacketTaggingHandlerImpl.java

@@ -104,7 +104,7 @@ public class NetworkPacketTaggingHandlerImpl
     //executable.
     String tasksFile = cGroupsHandler.getPathForCGroupTasks(
         CGroupsHandler.CGroupController.NET_CLS, containerIdStr);
-    String opArg = new StringBuffer(PrivilegedOperation.CGROUP_ARG_PREFIX)
+    String opArg = new StringBuilder(PrivilegedOperation.CGROUP_ARG_PREFIX)
         .append(tasksFile).toString();
     List<PrivilegedOperation> ops = new ArrayList<>();
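
Here the builder exists only to glue a prefix onto a path and is consumed by toString() in the same expression. A plain + concatenation would be lowered by javac to an equivalent (or, on modern JVMs, better) form, but keeping the existing shape is the right call for a mechanical sweep. A sketch with an assumed prefix value:

    public final class OneLinerSketch {
      static final String CGROUP_ARG_PREFIX = "cgroups=";  // assumed value

      static String opArg(String tasksFile) {
        // Build, append, and consume in one expression, as in the hunk.
        return new StringBuilder(CGROUP_ARG_PREFIX).append(tasksFile).toString();
        // Equivalent here: return CGROUP_ARG_PREFIX + tasksFile;
      }
    }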
 

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficControlBandwidthHandlerImpl.java

@@ -101,7 +101,7 @@ public class TrafficControlBandwidthHandlerImpl
     containerBandwidthMbit = (int) Math.ceil((double) yarnBandwidthMbit /
         MAX_CONTAINER_COUNT);
 
-    StringBuffer logLine = new StringBuffer("strict mode is set to :")
+    StringBuilder logLine = new StringBuilder("strict mode is set to :")
         .append(strictMode).append(System.lineSeparator());
 
     if (strictMode) {
@@ -152,7 +152,7 @@ public class TrafficControlBandwidthHandlerImpl
     //executable.
     String tasksFile = cGroupsHandler.getPathForCGroupTasks(
         CGroupsHandler.CGroupController.NET_CLS, containerIdStr);
-    String opArg = new StringBuffer(PrivilegedOperation.CGROUP_ARG_PREFIX)
+    String opArg = new StringBuilder(PrivilegedOperation.CGROUP_ARG_PREFIX)
         .append(tasksFile).toString();
     List<PrivilegedOperation> ops = new ArrayList<>();
 

+ 1 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java

@@ -225,7 +225,7 @@ import java.util.regex.Pattern;
       if (pattern.matcher(state).find()) {
         LOG.debug("Matched regex: {}", regex);
       } else {
-        String logLine = new StringBuffer("Failed to match regex: ")
+        String logLine = new StringBuilder("Failed to match regex: ")
               .append(regex).append(" Current state: ").append(state).toString();
         LOG.warn(logLine);
         return false;
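
One caveat worth keeping in mind for a sweep like this: the swap is safe only because every replaced buffer is method-local. Had any StringBuffer been a field appended to from multiple threads, replacing it would remove the synchronization those threads relied on. A hedged sketch of the case the patch must avoid (and, in the hunks shown, does):

    public final class SharedBufferCaveat {
      // UNSAFE if appended to concurrently: StringBuilder makes no
      // atomicity or visibility guarantees.
      private final StringBuilder racy = new StringBuilder();

      void appendRacy(String line) {
        racy.append(line).append('\n');        // data race under contention
      }

      // Genuinely shared accumulation should keep StringBuffer or lock.
      private final StringBuffer shared = new StringBuffer();

      void appendShared(String line) {
        shared.append(line).append('\n');      // StringBuffer synchronizes
      }
    }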

Some files were not shown because too many files changed in this diff