HADOOP-16029. Consecutive StringBuilder.append can be reused. Contributed by Ayush Saxena.

Giovanni Matteo Fumarola 6 years ago
parent
commit
fb8932a727
64 changed files with 668 additions and 673 deletions
  1. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
  2. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
  3. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStream.java
  4. 16 16
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
  5. 17 17
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
  6. 15 15
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
  7. 6 6
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
  8. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
  9. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java
  10. 4 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
  11. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
  12. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
  13. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Find.java
  14. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
  15. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
  16. 9 9
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
  17. 4 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECSchema.java
  18. 6 6
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
  19. 4 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
  20. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/StatsDSink.java
  21. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java
  22. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
  23. 8 8
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
  24. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
  25. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java
  26. 6 6
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
  27. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
  28. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
  29. 5 5
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
  30. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/InterruptEscalator.java
  31. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java
  32. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/BlockingThreadPoolExecutorService.java
  33. 6 6
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CpuTimeTracker.java
  34. 4 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SemaphoredDelegatingExecutor.java
  35. 7 7
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
  36. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java
  37. 2 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/DynamicBloomFilter.java
  38. 6 6
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
  39. 4 4
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
  40. 41 42
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
  41. 2 2
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsPathHandle.java
  42. 6 6
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ReencryptionStatus.java
  43. 5 5
      hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/StripedBlockUtil.java
  44. 3 3
      hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/PathLocation.java
  45. 5 5
      hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionContext.java
  46. 2 2
      hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterQuotaUsage.java
  47. 1 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
  48. 1 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeAdminManager.java
  49. 1 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
  50. 9 9
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java
  51. 1 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/PlanCommand.java
  52. 2 5
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
  53. 10 10
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
  54. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
  55. 352 352
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
  56. 14 14
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
  57. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
  58. 5 5
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
  59. 3 3
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java
  60. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java
  61. 8 5
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StoragePolicySummary.java
  62. 3 3
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java
  63. 2 2
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSZKFailoverController.java
  64. 7 7
      hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java
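
Every hunk below applies the same mechanical refactoring: StringBuilder.append (and StringBuffer.append) returns the builder itself, so consecutive appends on one builder can be chained into a single statement instead of naming the variable on every line. A minimal sketch of the before/after idiom, with hypothetical class and values not taken from the patch:

    // Minimal sketch (hypothetical names, not from the patch).
    // StringBuilder.append() returns 'this', so consecutive calls chain.
    public class AppendChainingSketch {
      public static void main(String[] args) {
        String name = "demo";
        int size = 42;

        // Before: each append re-references the builder variable.
        StringBuilder before = new StringBuilder();
        before.append("name: ");
        before.append(name);
        before.append(", size: ");
        before.append(size);

        // After: the same appends chained into one statement.
        StringBuilder after = new StringBuilder()
            .append("name: ").append(name)
            .append(", size: ").append(size);

        // Both builders hold the identical string; only the form changes.
        System.out.println(before.toString().equals(after.toString())); // true
      }
    }

The refactoring is behavior-preserving; the chained form is what each hunk below introduces.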

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java

@@ -65,8 +65,8 @@ public enum CipherSuite {
   @Override
   public String toString() {
     StringBuilder builder = new StringBuilder("{");
-    builder.append("name: " + name);
-    builder.append(", algorithmBlockSize: " + algoBlockSize);
+    builder.append("name: " + name)
+        .append(", algorithmBlockSize: " + algoBlockSize);
     if (unknownValue != null) {
       builder.append(", unknownValue: " + unknownValue);
     }

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java

@@ -330,9 +330,9 @@ public class BlockLocation implements Serializable {
   @Override
   public String toString() {
     StringBuilder result = new StringBuilder();
-    result.append(offset);
-    result.append(',');
-    result.append(length);
+    result.append(offset)
+        .append(',')
+        .append(length);
     if (corrupt) {
       result.append("(corrupt)");
     }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStream.java

@@ -105,8 +105,8 @@ public class FSDataOutputStream extends DataOutputStream
   public String toString() {
     final StringBuilder sb = new StringBuilder(
         "FSDataOutputStream{");
-    sb.append("wrappedStream=").append(wrappedStream);
-    sb.append('}');
+    sb.append("wrappedStream=").append(wrappedStream)
+        .append('}');
     return sb.toString();
   }
 

+ 16 - 16
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java

@@ -115,14 +115,14 @@ public class FileEncryptionInfo implements Serializable {
 
   @Override
   public String toString() {
-    StringBuilder builder = new StringBuilder("{");
-    builder.append("cipherSuite: " + cipherSuite);
-    builder.append(", cryptoProtocolVersion: " + version);
-    builder.append(", edek: " + Hex.encodeHexString(edek));
-    builder.append(", iv: " + Hex.encodeHexString(iv));
-    builder.append(", keyName: " + keyName);
-    builder.append(", ezKeyVersionName: " + ezKeyVersionName);
-    builder.append("}");
+    StringBuilder builder = new StringBuilder("{")
+        .append("cipherSuite: " + cipherSuite)
+        .append(", cryptoProtocolVersion: " + version)
+        .append(", edek: " + Hex.encodeHexString(edek))
+        .append(", iv: " + Hex.encodeHexString(iv))
+        .append(", keyName: " + keyName)
+        .append(", ezKeyVersionName: " + ezKeyVersionName)
+        .append("}");
     return builder.toString();
   }
 
@@ -136,14 +136,14 @@ public class FileEncryptionInfo implements Serializable {
    * Currently this method is used by CLI for backward compatibility.
    */
   public String toStringStable() {
-    StringBuilder builder = new StringBuilder("{");
-    builder.append("cipherSuite: " + cipherSuite);
-    builder.append(", cryptoProtocolVersion: " + version);
-    builder.append(", edek: " + Hex.encodeHexString(edek));
-    builder.append(", iv: " + Hex.encodeHexString(iv));
-    builder.append(", keyName: " + keyName);
-    builder.append(", ezKeyVersionName: " + ezKeyVersionName);
-    builder.append("}");
+    StringBuilder builder = new StringBuilder("{")
+        .append("cipherSuite: " + cipherSuite)
+        .append(", cryptoProtocolVersion: " + version)
+        .append(", edek: " + Hex.encodeHexString(edek))
+        .append(", iv: " + Hex.encodeHexString(iv))
+        .append(", keyName: " + keyName)
+        .append(", ezKeyVersionName: " + ezKeyVersionName)
+        .append("}");
     return builder.toString();
   }
 }

+ 17 - 17
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java

@@ -442,21 +442,21 @@ public class FileStatus implements Writable, Comparable<Object>,
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append(getClass().getSimpleName()); 
-    sb.append("{");
-    sb.append("path=" + path);
-    sb.append("; isDirectory=" + isdir);
+    sb.append(getClass().getSimpleName())
+        .append("{")
+        .append("path=" + path)
+        .append("; isDirectory=" + isdir);
     if(!isDirectory()){
-      sb.append("; length=" + length);
-      sb.append("; replication=" + block_replication);
-      sb.append("; blocksize=" + blocksize);
+      sb.append("; length=" + length)
+          .append("; replication=" + block_replication)
+          .append("; blocksize=" + blocksize);
     }
-    sb.append("; modification_time=" + modification_time);
-    sb.append("; access_time=" + access_time);
-    sb.append("; owner=" + owner);
-    sb.append("; group=" + group);
-    sb.append("; permission=" + permission);
-    sb.append("; isSymlink=" + isSymlink());
+    sb.append("; modification_time=" + modification_time)
+        .append("; access_time=" + access_time)
+        .append("; owner=" + owner)
+        .append("; group=" + group)
+        .append("; permission=" + permission)
+        .append("; isSymlink=" + isSymlink());
     if(isSymlink()) {
       try {
         sb.append("; symlink=" + getSymlink());
@@ -464,10 +464,10 @@ public class FileStatus implements Writable, Comparable<Object>,
         throw new RuntimeException("Unexpected exception", e);
       }
     }
-    sb.append("; hasAcl=" + hasAcl());
-    sb.append("; isEncrypted=" + isEncrypted());
-    sb.append("; isErasureCoded=" + isErasureCoded());
-    sb.append("}");
+    sb.append("; hasAcl=" + hasAcl())
+        .append("; isEncrypted=" + isEncrypted())
+        .append("; isErasureCoded=" + isErasureCoded())
+        .append("}");
     return sb.toString();
   }
 

+ 15 - 15
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java

@@ -367,8 +367,8 @@ public class FileUtil {
           returnVal = false;
       } catch (IOException e) {
         gotException = true;
-        exceptions.append(e.getMessage());
-        exceptions.append("\n");
+        exceptions.append(e.getMessage())
+            .append("\n");
       }
     }
     if (gotException) {
@@ -873,10 +873,10 @@ public class FileUtil {
     if (gzipped) {
       untarCommand.append("gzip -dc | (");
     }
-    untarCommand.append("cd '");
-    untarCommand.append(FileUtil.makeSecureShellPath(untarDir));
-    untarCommand.append("' && ");
-    untarCommand.append("tar -x ");
+    untarCommand.append("cd '")
+        .append(FileUtil.makeSecureShellPath(untarDir))
+        .append("' && ")
+        .append("tar -x ");
 
     if (gzipped) {
       untarCommand.append(")");
@@ -888,14 +888,14 @@ public class FileUtil {
       boolean gzipped) throws IOException {
     StringBuffer untarCommand = new StringBuffer();
     if (gzipped) {
-      untarCommand.append(" gzip -dc '");
-      untarCommand.append(FileUtil.makeSecureShellPath(inFile));
-      untarCommand.append("' | (");
+      untarCommand.append(" gzip -dc '")
+          .append(FileUtil.makeSecureShellPath(inFile))
+          .append("' | (");
     }
-    untarCommand.append("cd '");
-    untarCommand.append(FileUtil.makeSecureShellPath(untarDir));
-    untarCommand.append("' && ");
-    untarCommand.append("tar -xf ");
+    untarCommand.append("cd '")
+        .append(FileUtil.makeSecureShellPath(untarDir))
+        .append("' && ")
+        .append("tar -xf ");
 
     if (gzipped) {
       untarCommand.append(" -)");
@@ -1504,8 +1504,8 @@ public class FileUtil {
             classPathEntryList.add(jar.toUri().toURL().toExternalForm());
           }
         } else {
-          unexpandedWildcardClasspath.append(File.pathSeparator);
-          unexpandedWildcardClasspath.append(classPathEntry);
+          unexpandedWildcardClasspath.append(File.pathSeparator)
+              .append(classPathEntry);
         }
       } else {
         // Append just this entry

+ 6 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java

@@ -452,12 +452,12 @@ public class Path implements Comparable, Serializable, ObjectInputValidation {
     // illegal characters unescaped in the string, for glob processing, etc.
     StringBuilder buffer = new StringBuilder();
     if (uri.getScheme() != null) {
-      buffer.append(uri.getScheme());
-      buffer.append(":");
+      buffer.append(uri.getScheme())
+          .append(":");
     }
     if (uri.getAuthority() != null) {
-      buffer.append("//");
-      buffer.append(uri.getAuthority());
+      buffer.append("//")
+          .append(uri.getAuthority());
     }
     if (uri.getPath() != null) {
       String path = uri.getPath();
@@ -469,8 +469,8 @@ public class Path implements Comparable, Serializable, ObjectInputValidation {
       buffer.append(path);
     }
     if (uri.getFragment() != null) {
-      buffer.append("#");
-      buffer.append(uri.getFragment());
+      buffer.append("#")
+          .append(uri.getFragment());
     }
     return buffer.toString();
   }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java

@@ -330,8 +330,8 @@ public class AclEntry {
   public static String aclSpecToString(List<AclEntry> aclSpec) {
     StringBuilder buf = new StringBuilder();
     for ( AclEntry e : aclSpec ) {
-      buf.append(e.toString());
-      buf.append(",");
+      buf.append(e.toString())
+          .append(",");
     }
     return buf.substring(0, buf.length()-1);  // remove last ,
   }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java

@@ -202,8 +202,8 @@ public class Count extends FsCommand {
       if(!summary.getErasureCodingPolicy().equals("Replicated")){
         outputString.append("EC:");
       }
-      outputString.append(summary.getErasureCodingPolicy());
-      outputString.append(" ");
+      outputString.append(summary.getErasureCodingPolicy())
+          .append(" ");
     }
     outputString.append(src);
     out.println(outputString.toString());

+ 4 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java

@@ -334,10 +334,10 @@ class Ls extends FsCommand {
     }
 
     StringBuilder fmt = new StringBuilder();
-    fmt.append("%s%s"); // permission string
-    fmt.append("%"  + maxRepl  + "s ");
-    fmt.append((maxOwner > 0) ? "%-" + maxOwner + "s " : "%s");
-    fmt.append((maxGroup > 0) ? "%-" + maxGroup + "s " : "%s");
+    fmt.append("%s%s") // permission string
+        .append("%"  + maxRepl  + "s ")
+        .append((maxOwner > 0) ? "%-" + maxOwner + "s " : "%s")
+        .append((maxGroup > 0) ? "%-" + maxGroup + "s " : "%s");
     // Do not use '%-0s' as a formatting conversion, since it will throw a
     // a MissingFormatWidthException if it is used in String.format().
     // http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html#intFlags

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java

@@ -484,9 +484,9 @@ public class PathData implements Comparable<PathData> {
       return decodedRemainder;
     } else {
       StringBuilder buffer = new StringBuilder();
-      buffer.append(scheme);
-      buffer.append(":");
-      buffer.append(decodedRemainder);
+      buffer.append(scheme)
+          .append(":")
+          .append(decodedRemainder);
       return buffer.toString();
     }
   }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java

@@ -110,8 +110,8 @@ public abstract class BaseExpression implements Expression, Configurable {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append(getClass().getSimpleName());
-    sb.append("(");
+    sb.append(getClass().getSimpleName())
+        .append("(");
     boolean firstArg = true;
     for (String arg : getArguments()) {
       if (!firstArg) {

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Find.java

@@ -134,8 +134,8 @@ public class Find extends FsCommand {
     for (String line : HELP) {
       sb.append(line).append("\n");
     }
-    sb.append("\n");
-    sb.append("The following primary expressions are recognised:\n");
+    sb.append("\n")
+        .append("The following primary expressions are recognised:\n");
     for (Expression expr : primaries) {
       for (String line : expr.getUsage()) {
         sb.append("  ").append(line).append("\n");

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java

@@ -220,8 +220,8 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
     StringBuilder buf = new StringBuilder(MD5_LEN*2);
     for (int i = 0; i < MD5_LEN; i++) {
       int b = digest[i];
-      buf.append(HEX_DIGITS[(b >> 4) & 0xf]);
-      buf.append(HEX_DIGITS[b & 0xf]);
+      buf.append(HEX_DIGITS[(b >> 4) & 0xf])
+          .append(HEX_DIGITS[b & 0xf]);
     }
     return buf.toString();
   }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java

@@ -826,8 +826,8 @@ public class SequenceFile {
         this.theMetadata.entrySet().iterator();
       while (iter.hasNext()) {
         Map.Entry<Text, Text> en = iter.next();
-        sb.append("\t").append(en.getKey().toString()).append("\t").append(en.getValue().toString());
-        sb.append("\n");
+        sb.append("\t").append(en.getKey().toString()).append("\t")
+            .append(en.getValue().toString()).append("\n");
       }
       return sb.toString();
     }

+ 9 - 9
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java

@@ -85,15 +85,15 @@ public class CompressionCodecFactory {
     buf.append("{ ");
     if (itr.hasNext()) {
       Map.Entry<String, CompressionCodec> entry = itr.next();
-      buf.append(entry.getKey());
-      buf.append(": ");
-      buf.append(entry.getValue().getClass().getName());
+      buf.append(entry.getKey())
+          .append(": ")
+          .append(entry.getValue().getClass().getName());
       while (itr.hasNext()) {
         entry = itr.next();
-        buf.append(", ");
-        buf.append(entry.getKey());
-        buf.append(": ");
-        buf.append(entry.getValue().getClass().getName());
+        buf.append(", ")
+            .append(entry.getKey())
+            .append(": ")
+            .append(entry.getValue().getClass().getName());
       }
     }
     buf.append(" }");
@@ -161,8 +161,8 @@ public class CompressionCodecFactory {
       Class cls = itr.next();
       buf.append(cls.getName());
       while(itr.hasNext()) {
-        buf.append(',');
-        buf.append(itr.next().getName());
+        buf.append(',')
+            .append(itr.next().getName());
       }
     }
     conf.set(CommonConfigurationKeys.IO_COMPRESSION_CODECS_KEY, buf.toString());

+ 4 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECSchema.java

@@ -187,10 +187,10 @@ public final class ECSchema implements Serializable {
   public String toString() {
     StringBuilder sb = new StringBuilder("ECSchema=[");
 
-    sb.append("Codec=" + codecName + ", ");
-    sb.append(NUM_DATA_UNITS_KEY + "=" + numDataUnits + ", ");
-    sb.append(NUM_PARITY_UNITS_KEY + "=" + numParityUnits);
-    sb.append((extraOptions.isEmpty() ? "" : ", "));
+    sb.append("Codec=" + codecName + ", ")
+        .append(NUM_DATA_UNITS_KEY + "=" + numDataUnits + ", ")
+        .append(NUM_PARITY_UNITS_KEY + "=" + numParityUnits)
+        .append((extraOptions.isEmpty() ? "" : ", "));
 
     int i = 0;
     for (Map.Entry<String, String> entry : extraOptions.entrySet()) {

+ 6 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java

@@ -181,17 +181,17 @@ public class WritableRpcEngine implements RpcEngine {
     @Override
     public String toString() {
       StringBuilder buffer = new StringBuilder();
-      buffer.append(methodName);
-      buffer.append("(");
+      buffer.append(methodName)
+          .append("(");
       for (int i = 0; i < parameters.length; i++) {
         if (i != 0)
           buffer.append(", ");
         buffer.append(parameters[i]);
       }
-      buffer.append(")");
-      buffer.append(", rpc version="+rpcVersion);
-      buffer.append(", client version="+clientVersion);
-      buffer.append(", methodsFingerPrint="+clientMethodsHash);
+      buffer.append(")")
+          .append(", rpc version="+rpcVersion)
+          .append(", client version="+clientVersion)
+          .append(", methodsFingerPrint="+clientMethodsHash);
       return buffer.toString();
     }
 

+ 4 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java

@@ -76,10 +76,10 @@ public class GraphiteSink implements MetricsSink, Closeable {
 
         for (MetricsTag tag : record.tags()) {
             if (tag.value() != null) {
-                metricsPathPrefix.append(".");
-                metricsPathPrefix.append(tag.name());
-                metricsPathPrefix.append("=");
-                metricsPathPrefix.append(tag.value());
+                metricsPathPrefix.append(".")
+                    .append(tag.name())
+                    .append("=")
+                    .append(tag.value());
             }
         }
 

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/StatsDSink.java

@@ -122,9 +122,9 @@ public class StatsDSink implements MetricsSink, Closeable {
         buf.append(hn.substring(0, idx)).append(PERIOD);
       }
     }
-    buf.append(sn).append(PERIOD);
-    buf.append(ctx).append(PERIOD);
-    buf.append(record.name().replaceAll("\\.", "-")).append(PERIOD);
+    buf.append(sn).append(PERIOD)
+        .append(ctx).append(PERIOD)
+        .append(record.name().replaceAll("\\.", "-")).append(PERIOD);
 
     // Collect datapoints.
     for (AbstractMetric metric : record.metrics()) {

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java

@@ -124,8 +124,8 @@ public abstract class AbstractDNSToSwitchMapping
             .append("\n");
         switches.add(entry.getValue());
       }
-      builder.append("Nodes: ").append(rack.size()).append("\n");
-      builder.append("Switches: ").append(switches.size()).append("\n");
+      builder.append("Nodes: ").append(rack.size()).append("\n")
+          .append("Switches: ").append(switches.size()).append("\n");
     } else {
       builder.append("No topology information");
     }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java

@@ -850,8 +850,8 @@ public class NetUtils {
     StringBuilder hostDetails = new StringBuilder(27);
     hostDetails.append("local host is: ")
         .append(quoteHost(localHost))
-        .append("; ");
-    hostDetails.append("destination host is: ").append(quoteHost(destHost))
+        .append("; ")
+        .append("destination host is: ").append(quoteHost(destHost))
         .append(":")
         .append(destPort).append("; ");
     return hostDetails.toString();

+ 8 - 8
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java

@@ -710,18 +710,18 @@ public class NetworkTopology {
   public String toString() {
     // print the number of racks
     StringBuilder tree = new StringBuilder();
-    tree.append("Number of racks: ");
-    tree.append(numOfRacks);
-    tree.append("\n");
+    tree.append("Number of racks: ")
+        .append(numOfRacks)
+        .append("\n");
     // print the number of leaves
     int numOfLeaves = getNumOfLeaves();
-    tree.append("Expected number of leaves:");
-    tree.append(numOfLeaves);
-    tree.append("\n");
+    tree.append("Expected number of leaves:")
+        .append(numOfLeaves)
+        .append("\n");
     // print nodes
     for(int i=0; i<numOfLeaves; i++) {
-      tree.append(NodeBase.getPath(clusterMap.getLeaf(i, null)));
-      tree.append("\n");
+      tree.append(NodeBase.getPath(clusterMap.getLeaf(i, null)))
+          .append("\n");
     }
     return tree.toString();
   }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java

@@ -81,8 +81,8 @@ public final class ProviderUtils {
     String authority = nestedUri.getAuthority();
     if (authority != null) {
       String[] parts = nestedUri.getAuthority().split("@", 2);
-      result.append(parts[0]);
-      result.append("://");
+      result.append(parts[0])
+          .append("://");
       if (parts.length == 2) {
         result.append(parts[1]);
       }

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java

@@ -63,9 +63,9 @@ public abstract class CredentialProvider {
 
     public String toString() {
       StringBuilder buf = new StringBuilder();
-      buf.append("alias(");
-      buf.append(alias);
-      buf.append(")=");
+      buf.append("alias(")
+          .append(alias)
+          .append(")=");
       if (credential == null) {
         buf.append("null");
       } else {

+ 6 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java

@@ -121,12 +121,12 @@ public class CredentialShell extends CommandShell {
   public String getCommandUsage() {
     StringBuffer sbuf = new StringBuffer(USAGE_PREFIX + COMMANDS);
     String banner = StringUtils.repeat("=", 66);
-    sbuf.append(banner + "\n");
-    sbuf.append(CreateCommand.USAGE + ":\n\n" + CreateCommand.DESC + "\n");
-    sbuf.append(banner + "\n");
-    sbuf.append(DeleteCommand.USAGE + ":\n\n" + DeleteCommand.DESC + "\n");
-    sbuf.append(banner + "\n");
-    sbuf.append(ListCommand.USAGE + ":\n\n" + ListCommand.DESC + "\n");
+    sbuf.append(banner + "\n")
+        .append(CreateCommand.USAGE + ":\n\n" + CreateCommand.DESC + "\n")
+        .append(banner + "\n")
+        .append(DeleteCommand.USAGE + ":\n\n" + DeleteCommand.DESC + "\n")
+        .append(banner + "\n")
+        .append(ListCommand.USAGE + ":\n\n" + ListCommand.DESC + "\n");
     return sbuf.toString();
   }
 

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java

@@ -295,9 +295,9 @@ public class AccessControlList implements Writable {
       sb.append('*');
     }
     else {
-      sb.append(getUsersString());
-      sb.append(" ");
-      sb.append(getGroupsString());
+      sb.append(getUsersString())
+          .append(" ")
+          .append(getGroupsString());
     }
     return sb.toString();
   }

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java

@@ -417,9 +417,9 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
                 // Don't trim the CN, though!
                 final String cn = StringUtils.toLowerCase(it.next());
                 // Store CN in StringBuffer in case we need to report an error.
-                buf.append(" <");
-                buf.append(cn);
-                buf.append('>');
+                buf.append(" <")
+                    .append(cn)
+                    .append('>');
                 if (it.hasNext()) {
                     buf.append(" OR");
                 }

+ 5 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java

@@ -456,11 +456,11 @@ public class Token<T extends TokenIdentifier> implements Writable {
   @Override
   public String toString() {
     StringBuilder buffer = new StringBuilder();
-    buffer.append("Kind: ");
-    buffer.append(kind.toString());
-    buffer.append(", Service: ");
-    buffer.append(service.toString());
-    buffer.append(", Ident: ");
+    buffer.append("Kind: ")
+        .append(kind.toString())
+        .append(", Service: ")
+        .append(service.toString())
+        .append(", Ident: ");
     identifierToString(buffer);
     return buffer.toString();
   }

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/InterruptEscalator.java

@@ -94,9 +94,9 @@ public class InterruptEscalator implements IrqHandler.Interrupted {
     if (owner != null) {
       sb.append(", owner= ").append(owner.toString());
     }
-    sb.append(", shutdownTimeMillis=").append(shutdownTimeMillis);
-    sb.append(", forcedShutdownTimedOut=").append(forcedShutdownTimedOut);
-    sb.append('}');
+    sb.append(", shutdownTimeMillis=").append(shutdownTimeMillis)
+        .append(", forcedShutdownTimedOut=").append(forcedShutdownTimedOut)
+        .append('}');
     return sb.toString();
   }
 

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java

@@ -69,8 +69,8 @@ public abstract class GetGroupsBase extends Configured implements Tool {
       StringBuilder sb = new StringBuilder();
       sb.append(username + " :");
       for (String group : getUgmProtocol().getGroupsForUser(username)) {
-        sb.append(" ");
-        sb.append(group);
+        sb.append(" ")
+            .append(group);
       }
       out.println(sb);
     }

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/BlockingThreadPoolExecutorService.java

@@ -161,9 +161,9 @@ public final class BlockingThreadPoolExecutorService
   public String toString() {
     final StringBuilder sb = new StringBuilder(
         "BlockingThreadPoolExecutorService{");
-    sb.append(super.toString());
-    sb.append(", activeCount=").append(getActiveCount());
-    sb.append('}');
+    sb.append(super.toString())
+        .append(", activeCount=").append(getActiveCount())
+        .append('}');
     return sb.toString();
   }
 }

+ 6 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CpuTimeTracker.java

@@ -106,12 +106,12 @@ public class CpuTimeTracker {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("SampleTime " + this.sampleTime);
-    sb.append(" CummulativeCpuTime " + this.cumulativeCpuTime);
-    sb.append(" LastSampleTime " + this.lastSampleTime);
-    sb.append(" LastCummulativeCpuTime " + this.lastCumulativeCpuTime);
-    sb.append(" CpuUsage " + this.cpuUsage);
-    sb.append(" JiffyLengthMillisec " + this.jiffyLengthInMillis);
+    sb.append("SampleTime " + this.sampleTime)
+        .append(" CummulativeCpuTime " + this.cumulativeCpuTime)
+        .append(" LastSampleTime " + this.lastSampleTime)
+        .append(" LastCummulativeCpuTime " + this.lastCumulativeCpuTime)
+        .append(" CpuUsage " + this.cpuUsage)
+        .append(" JiffyLengthMillisec " + this.jiffyLengthInMillis);
     return sb.toString();
   }
 }

+ 4 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SemaphoredDelegatingExecutor.java

@@ -173,10 +173,10 @@ public class SemaphoredDelegatingExecutor extends
   public String toString() {
     final StringBuilder sb = new StringBuilder(
         "SemaphoredDelegatingExecutor{");
-    sb.append("permitCount=").append(getPermitCount());
-    sb.append(", available=").append(getAvailablePermits());
-    sb.append(", waiting=").append(getWaitingCount());
-    sb.append('}');
+    sb.append("permitCount=").append(getPermitCount())
+        .append(", available=").append(getAvailablePermits())
+        .append(", waiting=").append(getWaitingCount())
+        .append('}');
     return sb.toString();
   }
 

+ 7 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java

@@ -147,9 +147,9 @@ public abstract class Shell {
    */
   static String bashQuote(String arg) {
     StringBuilder buffer = new StringBuilder(arg.length() + 2);
-    buffer.append('\'');
-    buffer.append(arg.replace("'", "'\\''"));
-    buffer.append('\'');
+    buffer.append('\'')
+        .append(arg.replace("'", "'\\''"))
+        .append('\'');
     return buffer.toString();
   }
 
@@ -964,8 +964,8 @@ public abstract class Shell {
         try {
           String line = errReader.readLine();
           while((line != null) && !isInterrupted()) {
-            errMsg.append(line);
-            errMsg.append(System.getProperty("line.separator"));
+            errMsg.append(line)
+                .append(System.getProperty("line.separator"));
             line = errReader.readLine();
           }
         } catch(IOException ioe) {
@@ -1109,8 +1109,8 @@ public abstract class Shell {
       final StringBuilder sb =
           new StringBuilder("ExitCodeException ");
       sb.append("exitCode=").append(exitCode)
-        .append(": ");
-      sb.append(super.getMessage());
+          .append(": ")
+          .append(super.getMessage());
       return sb.toString();
     }
   }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java

@@ -84,8 +84,8 @@ public enum SignalLogger {
     for (String signalName : SIGNALS) {
       try {
         new Handler(signalName, LOG);
-        bld.append(separator);
-        bld.append(signalName);
+        bld.append(separator)
+            .append(signalName);
         separator = ", ";
       } catch (Exception e) {
         LOG.debug(e);

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/DynamicBloomFilter.java

@@ -237,8 +237,8 @@ public class DynamicBloomFilter extends Filter {
     StringBuilder res = new StringBuilder();
 
     for (int i = 0; i < matrix.length; i++) {
-      res.append(matrix[i]);
-      res.append(Character.LINE_SEPARATOR);
+      res.append(matrix[i])
+          .append(Character.LINE_SEPARATOR);
     }
     return res.toString();
   }

+ 6 - 6
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java

@@ -972,19 +972,19 @@ public class DFSInputStream extends FSInputStream
         " No live nodes contain current block ");
     errMsgr.append("Block locations:");
     for (DatanodeInfo datanode : nodes) {
-      errMsgr.append(" ");
-      errMsgr.append(datanode.toString());
+      errMsgr.append(" ")
+          .append(datanode.toString());
     }
     errMsgr.append(" Dead nodes: ");
     for (DatanodeInfo datanode : deadNodes.keySet()) {
-      errMsgr.append(" ");
-      errMsgr.append(datanode.toString());
+      errMsgr.append(" ")
+          .append(datanode.toString());
     }
     if (ignoredNodes != null) {
       errMsgr.append(" Ignored nodes: ");
       for (DatanodeInfo datanode : ignoredNodes) {
-        errMsgr.append(" ");
-        errMsgr.append(datanode.toString());
+        errMsgr.append(" ")
+            .append(datanode.toString());
       }
     }
     return errMsgr.toString();

+ 4 - 4
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java

@@ -519,10 +519,10 @@ public class DFSUtilClient {
     // localhost), then append port
     // TODO : revisit if there is a better way
     StringBuilder sb = new StringBuilder();
-    sb.append(uri.getScheme());
-    sb.append("://");
-    sb.append(uri.getHost());
-    sb.append(":");
+    sb.append(uri.getScheme())
+        .append("://")
+        .append(uri.getHost())
+        .append(":");
     // TODO : currently, only the very first auxiliary port is being used.
     // But actually NN supports running multiple auxiliary
     sb.append(ports[0]);

+ 41 - 42
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java

@@ -374,8 +374,8 @@ public class DatanodeInfo extends DatanodeID implements Node {
     if (lookupName != null) {
       buffer.append(" (").append(lookupName).append(")");
     }
-    buffer.append("\n");
-    buffer.append("Hostname: ").append(getHostName()).append("\n");
+    buffer.append("\n")
+        .append("Hostname: ").append(getHostName()).append("\n");
 
     if (!NetworkTopology.DEFAULT_RACK.equals(location)) {
       buffer.append("Rack: ").append(location).append("\n");
@@ -396,35 +396,34 @@ public class DatanodeInfo extends DatanodeID implements Node {
       buffer.append("Normal\n");
     }
     buffer.append("Configured Capacity: ").append(c).append(" (")
-        .append(StringUtils.byteDesc(c)).append(")").append("\n");
-    buffer.append("DFS Used: ").append(u).append(" (")
-        .append(StringUtils.byteDesc(u)).append(")").append("\n");
-    buffer.append("Non DFS Used: ").append(nonDFSUsed).append(" (")
-        .append(StringUtils.byteDesc(nonDFSUsed)).append(")").append("\n");
-    buffer.append("DFS Remaining: ").append(r).append(" (")
-        .append(StringUtils.byteDesc(r)).append(")").append("\n");
-    buffer.append("DFS Used%: ").append(percent2String(usedPercent))
-        .append("\n");
-    buffer.append("DFS Remaining%: ").append(percent2String(remainingPercent))
-        .append("\n");
-    buffer.append("Configured Cache Capacity: ").append(cc).append(" (")
-        .append(StringUtils.byteDesc(cc)).append(")").append("\n");
-    buffer.append("Cache Used: ").append(cu).append(" (")
-        .append(StringUtils.byteDesc(cu)).append(")").append("\n");
-    buffer.append("Cache Remaining: ").append(cr).append(" (")
-        .append(StringUtils.byteDesc(cr)).append(")").append("\n");
-    buffer.append("Cache Used%: ").append(percent2String(cacheUsedPercent))
-        .append("\n");
-    buffer.append("Cache Remaining%: ")
-        .append(percent2String(cacheRemainingPercent)).append("\n");
-    buffer.append("Xceivers: ").append(getXceiverCount()).append("\n");
-    buffer.append("Last contact: ").append(new Date(lastUpdate)).append("\n");
-    buffer
+        .append(StringUtils.byteDesc(c)).append(")").append("\n")
+        .append("DFS Used: ").append(u).append(" (")
+        .append(StringUtils.byteDesc(u)).append(")").append("\n")
+        .append("Non DFS Used: ").append(nonDFSUsed).append(" (")
+        .append(StringUtils.byteDesc(nonDFSUsed)).append(")").append("\n")
+        .append("DFS Remaining: ").append(r).append(" (")
+        .append(StringUtils.byteDesc(r)).append(")").append("\n")
+        .append("DFS Used%: ").append(percent2String(usedPercent))
+        .append("\n")
+        .append("DFS Remaining%: ").append(percent2String(remainingPercent))
+        .append("\n")
+        .append("Configured Cache Capacity: ").append(cc).append(" (")
+        .append(StringUtils.byteDesc(cc)).append(")").append("\n")
+        .append("Cache Used: ").append(cu).append(" (")
+        .append(StringUtils.byteDesc(cu)).append(")").append("\n")
+        .append("Cache Remaining: ").append(cr).append(" (")
+        .append(StringUtils.byteDesc(cr)).append(")").append("\n")
+        .append("Cache Used%: ").append(percent2String(cacheUsedPercent))
+        .append("\n")
+        .append("Cache Remaining%: ")
+        .append(percent2String(cacheRemainingPercent)).append("\n")
+        .append("Xceivers: ").append(getXceiverCount()).append("\n")
+        .append("Last contact: ").append(new Date(lastUpdate)).append("\n")
         .append("Last Block Report: ")
         .append(
             lastBlockReportTime != 0 ? new Date(lastBlockReportTime) : "Never")
-        .append("\n");
-    buffer.append("Num of Blocks: ").append(blockCount).append("\n");
+        .append("\n")
+        .append("Num of Blocks: ").append(blockCount).append("\n");
     return buffer.toString();
   }
 
@@ -458,20 +457,20 @@ public class DatanodeInfo extends DatanodeID implements Node {
       buffer.append(" IN");
     }
     buffer.append(" ").append(c).append("(").append(StringUtils.byteDesc(c))
-        .append(")");
-    buffer.append(" ").append(u).append("(").append(StringUtils.byteDesc(u))
-        .append(")");
-    buffer.append(" ").append(percent2String(usedPercent));
-    buffer.append(" ").append(r).append("(").append(StringUtils.byteDesc(r))
-        .append(")");
-    buffer.append(" ").append(cc).append("(").append(StringUtils.byteDesc(cc))
-        .append(")");
-    buffer.append(" ").append(cu).append("(").append(StringUtils.byteDesc(cu))
-        .append(")");
-    buffer.append(" ").append(percent2String(cacheUsedPercent));
-    buffer.append(" ").append(cr).append("(").append(StringUtils.byteDesc(cr))
-        .append(")");
-    buffer.append(" ").append(new Date(lastUpdate));
+        .append(")")
+        .append(" ").append(u).append("(").append(StringUtils.byteDesc(u))
+        .append(")")
+        .append(" ").append(percent2String(usedPercent))
+        .append(" ").append(r).append("(").append(StringUtils.byteDesc(r))
+        .append(")")
+        .append(" ").append(cc).append("(").append(StringUtils.byteDesc(cc))
+        .append(")")
+        .append(" ").append(cu).append("(").append(StringUtils.byteDesc(cu))
+        .append(")")
+        .append(" ").append(percent2String(cacheUsedPercent))
+        .append(" ").append(cr).append("(").append(StringUtils.byteDesc(cr))
+        .append(")")
+        .append(" ").append(new Date(lastUpdate));
     return buffer.toString();
   }
 

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsPathHandle.java

@@ -111,8 +111,8 @@ public final class HdfsPathHandle implements PathHandle {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("{ ");
-    sb.append("\"path\" : \"").append(path).append("\"");
+    sb.append("{ ")
+        .append("\"path\" : \"").append(path).append("\"");
     if (inodeId != null) {
       sb.append(",\"inodeId\" : ").append(inodeId);
     }

+ 6 - 6
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ReencryptionStatus.java

@@ -200,12 +200,12 @@ public final class ReencryptionStatus {
     StringBuilder sb = new StringBuilder();
     for (Map.Entry<Long, ZoneReencryptionStatus> entry : zoneStatuses
         .entrySet()) {
-      sb.append("[zone:" + entry.getKey());
-      sb.append(" state:" + entry.getValue().getState());
-      sb.append(" lastProcessed:" + entry.getValue().getLastCheckpointFile());
-      sb.append(" filesReencrypted:" + entry.getValue().getFilesReencrypted());
-      sb.append(" fileReencryptionFailures:" + entry.getValue()
-          .getNumReencryptionFailures() + "]");
+      sb.append("[zone:" + entry.getKey())
+          .append(" state:" + entry.getValue().getState())
+          .append(" lastProcessed:" + entry.getValue().getLastCheckpointFile())
+          .append(" filesReencrypted:" + entry.getValue().getFilesReencrypted())
+          .append(" fileReencryptionFailures:" + entry.getValue()
+              .getNumReencryptionFailures() + "]");
     }
     return sb.toString();
   }

+ 5 - 5
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/StripedBlockUtil.java

@@ -109,11 +109,11 @@ public class StripedBlockUtil {
     @Override
     public String toString() {
       final StringBuilder sb = new StringBuilder();
-      sb.append("bytesRead=").append(bytesRead);
-      sb.append(',');
-      sb.append("isShortCircuit=").append(isShortCircuit);
-      sb.append(',');
-      sb.append("networkDistance=").append(networkDistance);
+      sb.append("bytesRead=").append(bytesRead)
+          .append(',')
+          .append("isShortCircuit=").append(isShortCircuit)
+          .append(',')
+          .append("networkDistance=").append(networkDistance);
       return sb.toString();
     }
   }

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/PathLocation.java

@@ -162,9 +162,9 @@ public class PathLocation {
       sb.append(nsId + "->" + path);
     }
     if (this.destinations.size() > 1) {
-      sb.append(" [");
-      sb.append(this.destOrder.toString());
-      sb.append("]");
+      sb.append(" [")
+          .append(this.destOrder.toString())
+          .append("]");
     }
     return sb.toString();
   }

+ 5 - 5
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionContext.java

@@ -116,11 +116,11 @@ public class ConnectionContext {
     Class<?> clazz = proxy.getClass();
 
     StringBuilder sb = new StringBuilder();
-    sb.append(clazz.getSimpleName());
-    sb.append("@");
-    sb.append(addr);
-    sb.append("x");
-    sb.append(numThreads);
+    sb.append(clazz.getSimpleName())
+        .append("@")
+        .append(addr)
+        .append("x")
+        .append(numThreads);
     if (closed) {
       sb.append("[CLOSED]");
     }

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterQuotaUsage.java

@@ -110,8 +110,8 @@ public final class RouterQuotaUsage extends QuotaUsage {
 
     StringBuilder str = new StringBuilder();
     str.append("[NsQuota: ").append(nsQuota).append("/")
-        .append(nsCount);
-    str.append(", SsQuota: ").append(ssQuota)
+        .append(nsCount)
+        .append(", SsQuota: ").append(ssQuota)
         .append("/").append(ssCount)
         .append("]");
     return str.toString();

+ 1 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java

@@ -1900,8 +1900,7 @@ public class BlockManager implements BlockStatsMXBean {
         if (targets != null && targets.length != 0) {
           StringBuilder targetList = new StringBuilder("datanode(s)");
           for (DatanodeStorageInfo target : targets) {
-            targetList.append(' ');
-            targetList.append(target.getDatanodeDescriptor());
+            targetList.append(' ').append(target.getDatanodeDescriptor());
           }
           blockLog.debug("BLOCK* ask {} to replicate {} to {}", rw.getSrcNodes(),
               rw.getBlock(), targetList);

+ 1 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeAdminManager.java

@@ -395,8 +395,7 @@ public class DatanodeAdminManager {
     StringBuilder nodeList = new StringBuilder();
     for (DatanodeStorageInfo storage : storages) {
       final DatanodeDescriptor node = storage.getDatanodeDescriptor();
-      nodeList.append(node);
-      nodeList.append(' ');
+      nodeList.append(node).append(' ');
     }
     NameNode.blockStateChangeLog.info(
         "Block: " + block + ", Expected Replicas: "

+ 1 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java

@@ -2323,8 +2323,7 @@ public class DataNode extends ReconfigurableBase
     if (numTargets > 0) {
       StringBuilder xfersBuilder = new StringBuilder();
       for (int i = 0; i < numTargets; i++) {
-        xfersBuilder.append(xferTargets[i]);
-        xfersBuilder.append(" ");
+        xfersBuilder.append(xferTargets[i]).append(" ");
       }
       LOG.info(bpReg + " Starting thread to transfer " + 
                block + " to " + xfersBuilder);                       

+ 9 - 9
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java

@@ -228,15 +228,15 @@ public class VolumeScanner extends Thread {
         " path %s%n", volume.getStorageID(), volume));
     synchronized (stats) {
       p.append(String.format("Bytes verified in last hour       : %57d%n",
-          stats.bytesScannedInPastHour));
-      p.append(String.format("Blocks scanned in current period  : %57d%n",
-          stats.blocksScannedInCurrentPeriod));
-      p.append(String.format("Blocks scanned since restart      : %57d%n",
-          stats.blocksScannedSinceRestart));
-      p.append(String.format("Block pool scans since restart    : %57d%n",
-          stats.scansSinceRestart));
-      p.append(String.format("Block scan errors since restart   : %57d%n",
-          stats.scanErrorsSinceRestart));
+          stats.bytesScannedInPastHour))
+          .append(String.format("Blocks scanned in current period  : %57d%n",
+              stats.blocksScannedInCurrentPeriod))
+          .append(String.format("Blocks scanned since restart      : %57d%n",
+              stats.blocksScannedSinceRestart))
+          .append(String.format("Block pool scans since restart    : %57d%n",
+              stats.scansSinceRestart))
+          .append(String.format("Block scan errors since restart   : %57d%n",
+              stats.scanErrorsSinceRestart));
       if (stats.nextBlockPoolScanStartMs > 0) {
         p.append(String.format("Hours until next block pool scan  : %57.3f%n",
             positiveMsToHours(stats.nextBlockPoolScanStartMs -

+ 1 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/PlanCommand.java

@@ -176,8 +176,7 @@ public class PlanCommand extends Command {
       final String errMsg =
           "Errors while recording the output of plan command.";
       LOG.error(errMsg, e);
-      result.appendln(errMsg);
-      result.appendln(Throwables.getStackTraceAsString(e));
+      result.appendln(errMsg).appendln(Throwables.getStackTraceAsString(e));
     }
 
     getPrintStream().print(result.toString());

+ 2 - 5
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java

@@ -499,11 +499,8 @@ public class EncryptionZoneManager {
         final String srcEZPath = getFullPathName(srcParentEZI.getINodeId());
         final String dstEZPath = getFullPathName(dstParentEZI.getINodeId());
         final StringBuilder sb = new StringBuilder(srcIIP.getPath());
-        sb.append(" can't be moved from encryption zone ");
-        sb.append(srcEZPath);
-        sb.append(" to encryption zone ");
-        sb.append(dstEZPath);
-        sb.append(".");
+        sb.append(" can't be moved from encryption zone ").append(srcEZPath)
+            .append(" to encryption zone ").append(dstEZPath).append(".");
         throw new IOException(sb.toString());
       }
       checkMoveValidityForReencryption(srcIIP.getPath(),

+ 10 - 10
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java

@@ -762,16 +762,16 @@ public class FSEditLog implements LogsPurgeable {
     }
     lastPrintTime = now;
     StringBuilder buf = new StringBuilder();
-    buf.append("Number of transactions: ");
-    buf.append(numTransactions);
-    buf.append(" Total time for transactions(ms): ");
-    buf.append(totalTimeTransactions);
-    buf.append(" Number of transactions batched in Syncs: ");
-    buf.append(numTransactionsBatchedInSync.get());
-    buf.append(" Number of syncs: ");
-    buf.append(editLogStream.getNumSync());
-    buf.append(" SyncTimes(ms): ");
-    buf.append(journalSet.getSyncTimes());
+    buf.append("Number of transactions: ")
+        .append(numTransactions)
+        .append(" Total time for transactions(ms): ")
+        .append(totalTimeTransactions)
+        .append(" Number of transactions batched in Syncs: ")
+        .append(numTransactionsBatchedInSync.get())
+        .append(" Number of syncs: ")
+        .append(editLogStream.getNumSync())
+        .append(" SyncTimes(ms): ")
+        .append(journalSet.getSyncTimes());
     LOG.info(buf.toString());
   }
 

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java

@@ -1055,8 +1055,8 @@ public class FSEditLogLoader {
   private static String formatEditLogReplayError(EditLogInputStream in,
       long recentOpcodeOffsets[], long txid) {
     StringBuilder sb = new StringBuilder();
-    sb.append("Error replaying edit log at offset " + in.getPosition());
-    sb.append(".  Expected transaction ID was ").append(txid);
+    sb.append("Error replaying edit log at offset " + in.getPosition())
+        .append(".  Expected transaction ID was ").append(txid);
     if (recentOpcodeOffsets[0] != -1) {
       Arrays.sort(recentOpcodeOffsets);
       sb.append("\nRecent opcode offsets:");

The diff of the file was suppressed because it is too large
+ 352 - 352
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java


+ 14 - 14
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

@@ -8031,19 +8031,19 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
         src = escapeJava(src);
         dst = escapeJava(dst);
         sb.setLength(0);
-        sb.append("allowed=").append(succeeded).append("\t");
-        sb.append("ugi=").append(userName).append("\t");
-        sb.append("ip=").append(addr).append("\t");
-        sb.append("cmd=").append(cmd).append("\t");
-        sb.append("src=").append(src).append("\t");
-        sb.append("dst=").append(dst).append("\t");
+        sb.append("allowed=").append(succeeded).append("\t")
+            .append("ugi=").append(userName).append("\t")
+            .append("ip=").append(addr).append("\t")
+            .append("cmd=").append(cmd).append("\t")
+            .append("src=").append(src).append("\t")
+            .append("dst=").append(dst).append("\t");
         if (null == status) {
           sb.append("perm=null");
         } else {
-          sb.append("perm=");
-          sb.append(status.getOwner()).append(":");
-          sb.append(status.getGroup()).append(":");
-          sb.append(status.getPermission());
+          sb.append("perm=")
+              .append(status.getOwner()).append(":")
+              .append(status.getGroup()).append(":")
+              .append(status.getPermission());
         }
         if (logTokenTrackingId) {
           sb.append("\t").append("trackingId=");
@@ -8061,8 +8061,8 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
           }
           sb.append(trackingId);
         }
-        sb.append("\t").append("proto=");
-        sb.append(Server.getProtocol());
+        sb.append("\t").append("proto=")
+            .append(Server.getProtocol());
         if (isCallerContextEnabled &&
             callerContext != null &&
             callerContext.isContextValid()) {
@@ -8076,8 +8076,8 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
           if (callerContext.getSignature() != null &&
               callerContext.getSignature().length > 0 &&
               callerContext.getSignature().length <= callerSignatureMaxLen) {
-            sb.append(":");
-            sb.append(new String(callerContext.getSignature(),
+            sb.append(":")
+                .append(new String(callerContext.getSignature(),
                 CallerContext.SIGNATURE_ENCODING));
           }
         }
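
The audit logger above clears and reuses a single builder with sb.setLength(0), which truncates the builder without a fresh allocation. A simplified, hypothetical sketch of that reuse pattern (the surrounding Hadoop code presumably handles concurrent access itself; a synchronized static is used here only to keep the example short):

    // Illustrative only: one reusable StringBuilder; setLength(0) clears it.
    public class AuditLineExample {
      private static final StringBuilder SB = new StringBuilder();

      static synchronized String auditLine(boolean allowed, String ugi, String cmd) {
        SB.setLength(0);  // reset the shared builder instead of reallocating
        SB.append("allowed=").append(allowed).append('\t')
            .append("ugi=").append(ugi).append('\t')
            .append("cmd=").append(cmd);
        return SB.toString();
      }

      public static void main(String[] args) {
        System.out.println(auditLine(true, "hdfs", "rename"));
      }
    }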

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java

@@ -697,8 +697,8 @@ public class JournalSet implements JournalManager {
     StringBuilder buf = new StringBuilder();
     for (JournalAndStream jas : journals) {
       if (jas.isActive()) {
-        buf.append(jas.getCurrentStream().getTotalSyncTime());
-        buf.append(" ");
+        buf.append(jas.getCurrentStream().getTotalSyncTime())
+            .append(" ");
       }
     }
     return buf.toString();
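
As an aside, the loop above joins the per-journal sync times with a trailing space. Where a trailing delimiter matters, java.util.StringJoiner produces the same space-separated list without it; a hypothetical sketch, not part of the change:

    // Illustrative only: StringJoiner leaves no trailing delimiter.
    import java.util.StringJoiner;

    public class SyncTimesExample {
      public static void main(String[] args) {
        long[] syncTimes = {12L, 7L, 31L};   // hypothetical per-journal totals
        StringJoiner joiner = new StringJoiner(" ");
        for (long t : syncTimes) {
          joiner.add(Long.toString(t));
        }
        System.out.println(joiner);          // prints "12 7 31"
      }
    }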

+ 5 - 5
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java

@@ -855,11 +855,11 @@ public class NamenodeFsck implements DataEncryptionKeyFactory {
           block.getLocalBlock());
       DatanodeStorageInfo[] storages = storedBlock
           .getUnderConstructionFeature().getExpectedStorageLocations();
-      report.append('\n');
-      report.append("Under Construction Block:\n");
-      report.append(blockNumber).append(". ").append(blkName);
-      report.append(" len=").append(block.getNumBytes());
-      report.append(" Expected_repl=" + storages.length);
+      report.append('\n')
+          .append("Under Construction Block:\n")
+          .append(blockNumber).append(". ").append(blkName)
+          .append(" len=").append(block.getNumBytes())
+          .append(" Expected_repl=" + storages.length);
       String info=getReplicaInfo(storedBlock);
       if (!info.isEmpty()){
         report.append(" ").append(info);

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java

@@ -54,9 +54,9 @@ public class QuotaByStorageTypeEntry {
    public String toString() {
      StringBuilder sb = new StringBuilder();
      assert (type != null);
-     sb.append(StringUtils.toLowerCase(type.toString()));
-     sb.append(':');
-     sb.append(quota);
+    sb.append(StringUtils.toLowerCase(type.toString()))
+        .append(':')
+        .append(quota);
      return sb.toString();
    }
 

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java

@@ -140,8 +140,8 @@ class RedundantEditLogInputStream extends EditLogInputStream {
     StringBuilder bld = new StringBuilder();
     String prefix = "";
     for (EditLogInputStream elis : streams) {
-      bld.append(prefix);
-      bld.append(elis.getName());
+      bld.append(prefix)
+          .append(elis.getName());
       prefix = ", ";
     }
     return bld.toString();

+ 8 - 5
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StoragePolicySummary.java

@@ -83,8 +83,10 @@ public class StoragePolicySummary {
 
   public String toString() {
     StringBuilder compliantBlocksSB = new StringBuilder();
-    compliantBlocksSB.append("\nBlocks satisfying the specified storage policy:");
-    compliantBlocksSB.append("\nStorage Policy                  # of blocks       % of blocks\n");
+    compliantBlocksSB
+        .append("\nBlocks satisfying the specified storage policy:")
+        .append("\nStorage Policy"
+            + "                  # of blocks       % of blocks\n");
     StringBuilder nonCompliantBlocksSB = new StringBuilder();
     Formatter compliantFormatter = new Formatter(compliantBlocksSB);
     Formatter nonCompliantFormatter = new Formatter(nonCompliantBlocksSB);
@@ -103,9 +105,10 @@ public class StoragePolicySummary {
             percentFormat.format(percent));
       } else {
         if (nonCompliantBlocksSB.length() == 0) {
-          nonCompliantBlocksSB.append("\nBlocks NOT satisfying the specified storage policy:");
-          nonCompliantBlocksSB.append("\nStorage Policy                  ");
-          nonCompliantBlocksSB.append(
+          nonCompliantBlocksSB
+              .append("\nBlocks NOT satisfying the specified storage policy:")
+              .append("\nStorage Policy                  ")
+              .append(
               "Specified Storage Policy      # of blocks       % of blocks\n");
         }
         nonCompliantFormatter.format("%-35s %-20s %10d  %20s%n",
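
This class pairs each StringBuilder with a java.util.Formatter; a Formatter constructed over an Appendable (which StringBuilder implements) writes its formatted output into that builder. A small self-contained sketch with a made-up row:

    // Illustrative only: the Formatter appends into the backing builder.
    import java.util.Formatter;

    public class FormatterOverBuilderExample {
      public static void main(String[] args) {
        StringBuilder sb = new StringBuilder();
        sb.append("\nStorage Policy                  # of blocks       % of blocks\n");
        try (Formatter f = new Formatter(sb)) {
          f.format("%-35s %10d  %20s%n", "HOT", 1234, "75.5%");  // hypothetical row
        }
        System.out.print(sb);
      }
    }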

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java

@@ -55,9 +55,9 @@ public abstract class ServerCommand {
 
   public String toString() {
     final StringBuilder sb = new StringBuilder();
-    sb.append(getClass().getSimpleName());
-    sb.append("/");
-    sb.append(action);
+    sb.append(getClass().getSimpleName())
+        .append("/")
+        .append(action);
     return sb.toString();
   }
 }

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSZKFailoverController.java

@@ -241,8 +241,8 @@ public class DFSZKFailoverController extends ZKFailoverController {
       IOUtils.copyBytes(conn.getInputStream(), out, 4096, true);
       StringBuilder localNNThreadDumpContent =
           new StringBuilder("-- Local NN thread dump -- \n");
-      localNNThreadDumpContent.append(out);
-      localNNThreadDumpContent.append("\n -- Local NN thread dump -- ");
+      localNNThreadDumpContent.append(out)
+          .append("\n -- Local NN thread dump -- ");
       LOG.info("{}", localNNThreadDumpContent.toString());
       isThreadDumpCaptured = true;
     } catch (IOException e) {

+ 7 - 7
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java

@@ -405,14 +405,14 @@ public final class CopyListingFileStatus implements Writable {
   public String toString() {
     StringBuilder sb = new StringBuilder(super.toString());
     sb.append('{');
-    sb.append(this.getPath() == null ? "" : this.getPath().toString());
-    sb.append(" length = ").append(this.getLen());
-    sb.append(" aclEntries = ").append(aclEntries);
-    sb.append(", xAttrs = ").append(xAttrs);
-    sb.append(", modTime = ").append(modificationTime);
+    sb.append(this.getPath() == null ? "" : this.getPath().toString())
+        .append(" length = ").append(this.getLen())
+        .append(" aclEntries = ").append(aclEntries)
+        .append(", xAttrs = ").append(xAttrs)
+        .append(", modTime = ").append(modificationTime);
     if (isSplit()) {
-      sb.append(", chunkOffset = ").append(this.getChunkOffset());
-      sb.append(", chunkLength = ").append(this.getChunkLength());
+      sb.append(", chunkOffset = ").append(this.getChunkOffset())
+          .append(", chunkLength = ").append(this.getChunkLength());
     }
     sb.append('}');
     return sb.toString();

Some files were not shown because too many files changed in this diff