
YARN-9287. Consecutive StringBuilder append should be reused. Contributed by Ayush Saxena.

Giovanni Matteo Fumarola committed 6 years ago
commit 95372657fc
75 changed files with 575 additions and 554 deletions
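The change is mechanical throughout: StringBuilder.append returns the builder it was called on, so a run of consecutive append statements can be chained into a single expression instead of repeating the receiver on every line. A minimal before/after sketch of the pattern (class, method names, and values below are illustrative only, not taken from the patch):

// Illustrative sketch only; names and values are hypothetical, not from this commit.
// StringBuilder.append(...) returns the same StringBuilder instance, so consecutive
// appends can be chained with identical behavior.
public final class AppendChainingExample {

  // Before: each append is a separate statement that restates the receiver.
  static String idBefore(long clusterTimestamp, int id) {
    StringBuilder sb = new StringBuilder(64);
    sb.append("application_");
    sb.append(clusterTimestamp);
    sb.append('_');
    sb.append(id);
    return sb.toString();
  }

  // After: the same appends, chained onto one statement.
  static String idAfter(long clusterTimestamp, int id) {
    StringBuilder sb = new StringBuilder(64);
    sb.append("application_")
        .append(clusterTimestamp)
        .append('_')
        .append(id);
    return sb.toString();
  }

  public static void main(String[] args) {
    // Both variants print application_1550000000000_1
    System.out.println(idBefore(1550000000000L, 1));
    System.out.println(idAfter(1550000000000L, 1));
  }
}

Both forms call the same methods on the same object, so the diff is purely stylistic; the chained form simply avoids restating the builder variable on every line.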
1. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java
2. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java
3. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
4. + 5 - 5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeLabel.java
5. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ReservationId.java
6. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/Resource.java
7. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntityGroupId.java
8. + 7 - 7  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/FlowActivityEntity.java
9. + 5 - 5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/FlowRunEntity.java
10. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/HAUtil.java
11. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java
12. + 22 - 22  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/ApiServiceClient.java
13. + 11 - 11  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
14. + 5 - 6  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Artifact.java
15. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/BaseResource.java
16. + 27 - 25  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Component.java
17. + 9 - 7  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/ConfigFile.java
18. + 6 - 6  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Configuration.java
19. + 19 - 15  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Container.java
20. + 5 - 5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Error.java
21. + 5 - 5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/KerberosPrincipal.java
22. + 16 - 16  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/PlacementConstraint.java
23. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/PlacementPolicy.java
24. + 8 - 6  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/ReadinessCheck.java
25. + 8 - 8  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Resource.java
26. + 7 - 7  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/ResourceInformation.java
27. + 32 - 29  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Service.java
28. + 6 - 6  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/ServiceStatus.java
29. + 12 - 12  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java
30. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/AbstractLauncher.java
31. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/CoreFileSystem.java
32. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java
33. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/ServiceUtils.java
34. + 7 - 6  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java
35. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
36. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java
37. + 41 - 37  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/TopCLI.java
38. + 12 - 12  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerPBImpl.java
39. + 12 - 12  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerStatusPBImpl.java
40. + 6 - 6  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/LocalizationStatusPBImpl.java
41. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/TokenPBImpl.java
42. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
43. + 14 - 14  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogToolUtils.java
44. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/LogAggregationFileController.java
45. + 8 - 7  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/Graph.java
46. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
47. + 6 - 5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSErrorsAndWarningsPage.java
48. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/records/SubClusterPolicyConfiguration.java
49. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/utils/FederationStateStoreFacade.java
50. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/sharedcache/SharedCacheUtil.java
51. + 7 - 7  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java
52. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
53. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
54. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java
55. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/invariants/MetricsInvariantChecker.java
56. + 5 - 5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/NodeAttributesManagerImpl.java
57. + 13 - 12  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/AbstractResourceUsage.java
58. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerApplicationAttempt.java
59. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/activities/ActivityNode.java
60. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java
61. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/ParentQueue.java
62. + 9 - 9  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/QueueCapacities.java
63. + 5 - 5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/common/PendingAsk.java
64. + 60 - 61  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/common/fica/FiCaSchedulerApp.java
65. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/event/ContainerPreemptEvent.java
66. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSAppAttempt.java
67. + 6 - 6  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
68. + 6 - 5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMErrorsAndWarningsPage.java
69. + 5 - 5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/webapp/FederationPage.java
70. + 5 - 5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/webapp/NodesPage.java
71. + 7 - 7  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
72. + 23 - 22  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineSchemaCreator.java
73. + 5 - 5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineSchemaUtils.java
74. + 2 - 2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Separator.java
75. + 6 - 6  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java

@@ -127,8 +127,8 @@ public abstract class ApplicationAttemptId implements
     StringBuilder sb = new StringBuilder(64);
     sb.append(APP_ATTEMPT_ID_PREFIX);
     ApplicationId appId = getApplicationId();
-    sb.append(appId.getClusterTimestamp());
-    sb.append('_');
+    sb.append(appId.getClusterTimestamp())
+        .append('_');
     FastNumberFormat.format(sb, appId.getId(), APP_ID_MIN_DIGITS);
     sb.append('_');
     FastNumberFormat.format(sb, getAttemptId(), ATTEMPT_ID_MIN_DIGITS);

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java

@@ -98,9 +98,9 @@ public abstract class ApplicationId implements Comparable<ApplicationId> {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder(64);
-    sb.append(APPLICATION_ID_PREFIX);
-    sb.append(getClusterTimestamp());
-    sb.append('_');
+    sb.append(APPLICATION_ID_PREFIX)
+        .append(getClusterTimestamp())
+        .append('_');
     FastNumberFormat.format(sb, getId(), APP_ID_MIN_DIGITS);
     return sb.toString();
   }

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java

@@ -172,8 +172,8 @@ public abstract class ContainerId implements Comparable<ContainerId>{
       sb.append('_');
     }
     ApplicationId appId = getApplicationAttemptId().getApplicationId();
-    sb.append(appId.getClusterTimestamp());
-    sb.append('_');
+    sb.append(appId.getClusterTimestamp())
+        .append('_');
     FastNumberFormat.format(sb, appId.getId(), APP_ID_MIN_DIGITS);
     sb.append('_');
     FastNumberFormat.format(sb, getApplicationAttemptId().getAttemptId(),

+ 5 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeLabel.java

@@ -98,11 +98,11 @@ public abstract class NodeLabel implements Comparable<NodeLabel> {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("<");
-    sb.append(getName());
-    sb.append(":exclusivity=");
-    sb.append(isExclusive());
-    sb.append(">");
+    sb.append("<")
+        .append(getName())
+        .append(":exclusivity=")
+        .append(isExclusive())
+        .append(">");
     return sb.toString();
   }
 

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ReservationId.java

@@ -103,9 +103,9 @@ public abstract class ReservationId implements Comparable<ReservationId> {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder(64);
-    sb.append(reserveIdStrPrefix);
-    sb.append(getClusterTimestamp());
-    sb.append('_');
+    sb.append(reserveIdStrPrefix)
+        .append(getClusterTimestamp())
+        .append('_');
     FastNumberFormat.format(sb, getId(), RESERVATION_ID_MIN_DIGITS);
     return sb.toString();
   }

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/Resource.java

@@ -475,10 +475,10 @@ public abstract class Resource implements Comparable<Resource> {
       if (ri.getValue() == 0) {
         continue;
       }
-      sb.append(", ");
-      sb.append(ri.getName()).append(": ")
-          .append(ri.getValue());
-      sb.append(ri.getUnits());
+      sb.append(", ")
+          .append(ri.getName()).append(": ")
+          .append(ri.getValue())
+          .append(ri.getUnits());
     }
 
     sb.append(">");

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntityGroupId.java

@@ -136,9 +136,9 @@ public class TimelineEntityGroupId implements
     StringBuilder sb = new StringBuilder();
     sb.append(TIMELINE_ENTITY_GROUPID_STR_PREFIX + "_");
     ApplicationId appId = getApplicationId();
-    sb.append(appId.getClusterTimestamp()).append("_");
-    sb.append(appId.getId()).append("_");
-    sb.append(getTimelineEntityGroupId());
+    sb.append(appId.getClusterTimestamp()).append("_")
+        .append(appId.getId()).append("_")
+        .append(getTimelineEntityGroupId());
     return sb.toString();
   }
 

+ 7 - 7
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/FlowActivityEntity.java

@@ -77,13 +77,13 @@ public class FlowActivityEntity extends TimelineEntity {
     String id = super.getId();
     if (id == null) {
       StringBuilder sb = new StringBuilder();
-      sb.append(getCluster());
-      sb.append('/');
-      sb.append(getDate().getTime());
-      sb.append('/');
-      sb.append(getUser());
-      sb.append('@');
-      sb.append(getFlowName());
+      sb.append(getCluster())
+          .append('/')
+          .append(getDate().getTime())
+          .append('/')
+          .append(getUser())
+          .append('@')
+          .append(getFlowName());
       id = sb.toString();
       setId(id);
     }

+ 5 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/FlowRunEntity.java

@@ -63,11 +63,11 @@ public class FlowRunEntity extends HierarchicalTimelineEntity {
     String id = super.getId();
     if (id == null) {
       StringBuilder sb = new StringBuilder();
-      sb.append(getInfo().get(USER_INFO_KEY).toString());
-      sb.append('@');
-      sb.append(getInfo().get(FLOW_NAME_INFO_KEY).toString());
-      sb.append('/');
-      sb.append(getInfo().get(FLOW_RUN_ID_INFO_KEY).toString());
+      sb.append(getInfo().get(USER_INFO_KEY).toString())
+          .append('@')
+          .append(getInfo().get(FLOW_NAME_INFO_KEY).toString())
+          .append('/')
+          .append(getInfo().get(FLOW_RUN_ID_INFO_KEY).toString());
       id = sb.toString();
       setId(id);
     }

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/HAUtil.java

@@ -129,8 +129,8 @@ public class HAUtil {
       for (String prefix : YarnConfiguration.getServiceAddressConfKeys(conf)) {
         checkAndSetRMRPCAddress(prefix, id, conf);
       }
-      setValue.append(id);
-      setValue.append(",");
+      setValue.append(id)
+          .append(",");
     }
     conf.set(YarnConfiguration.RM_HA_IDS,
       setValue.substring(0, setValue.length() - 1));

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java

@@ -906,16 +906,16 @@ public class Client {
     for (String c : conf.getStrings(
         YarnConfiguration.YARN_APPLICATION_CLASSPATH,
         YarnConfiguration.DEFAULT_YARN_CROSS_PLATFORM_APPLICATION_CLASSPATH)) {
-      classPathEnv.append(ApplicationConstants.CLASS_PATH_SEPARATOR);
-      classPathEnv.append(c.trim());
+      classPathEnv.append(ApplicationConstants.CLASS_PATH_SEPARATOR)
+          .append(c.trim());
     }
     classPathEnv.append(ApplicationConstants.CLASS_PATH_SEPARATOR).append(
       "./log4j.properties");
 
     // add the runtime classpath needed for tests to work
     if (conf.getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
-      classPathEnv.append(':');
-      classPathEnv.append(System.getProperty("java.class.path"));
+      classPathEnv.append(':')
+          .append(System.getProperty("java.class.path"));
     }
 
     env.put("CLASSPATH", classPathEnv.toString());

+ 22 - 22
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/ApiServiceClient.java

@@ -105,14 +105,14 @@ public class ApiServiceClient extends AppAdminClient {
         Client client = Client.create();
         client.setFollowRedirects(false);
         StringBuilder sb = new StringBuilder();
-        sb.append(scheme);
-        sb.append(host);
-        sb.append(path);
+        sb.append(scheme)
+            .append(host)
+            .append(path);
         if (!useKerberos) {
           try {
             String username = UserGroupInformation.getCurrentUser().getShortUserName();
-            sb.append("?user.name=");
-            sb.append(username);
+            sb.append("?user.name=")
+                .append(username);
           } catch (IOException e) {
             LOG.debug("Fail to resolve username: {}", e);
           }
@@ -154,11 +154,11 @@ public class ApiServiceClient extends AppAdminClient {
   private String getServicePath(String appName) throws IOException {
     String url = getRMWebAddress();
     StringBuilder api = new StringBuilder();
-    api.append(url);
-    api.append("/app/v1/services");
+    api.append(url)
+        .append("/app/v1/services");
     if (appName != null) {
-      api.append("/");
-      api.append(appName);
+      api.append("/")
+          .append(appName);
     }
     appendUserNameIfRequired(api);
     return api.toString();
@@ -168,8 +168,8 @@ public class ApiServiceClient extends AppAdminClient {
     Preconditions.checkNotNull(appName);
     String url = getRMWebAddress();
     StringBuilder api = new StringBuilder();
-    api.append(url);
-    api.append("/app/v1/services/").append(appName).append("/")
+    api.append(url)
+        .append("/app/v1/services/").append(appName).append("/")
         .append(RestApiConstants.COMP_INSTANCES);
     appendUserNameIfRequired(api);
     return api.toString();
@@ -196,8 +196,8 @@ public class ApiServiceClient extends AppAdminClient {
     Preconditions.checkNotNull(appName);
     String url = getRMWebAddress();
     StringBuilder api = new StringBuilder();
-    api.append(url);
-    api.append("/app/v1/services/").append(appName).append("/")
+    api.append(url)
+        .append("/app/v1/services/").append(appName).append("/")
         .append(RestApiConstants.COMPONENTS);
     appendUserNameIfRequired(api);
     return api.toString();
@@ -528,16 +528,16 @@ public class ApiServiceClient extends AppAdminClient {
           .get(ClientResponse.class);
       if (response.getStatus() == 404) {
         StringBuilder sb = new StringBuilder();
-        sb.append(" Service ");
-        sb.append(appName);
-        sb.append(" not found");
+        sb.append(" Service ")
+            .append(appName)
+            .append(" not found");
         return sb.toString();
       }
       if (response.getStatus() != 200) {
         StringBuilder sb = new StringBuilder();
-        sb.append(appName);
-        sb.append(" Failed : HTTP error code : ");
-        sb.append(response.getStatus());
+        sb.append(appName)
+            .append(" Failed : HTTP error code : ")
+            .append(response.getStatus());
         return sb.toString();
       }
       output = response.getEntity(String.class);
@@ -659,9 +659,9 @@ public class ApiServiceClient extends AppAdminClient {
       ClientResponse response = getApiClient(uri).get(ClientResponse.class);
       if (response.getStatus() != 200) {
         StringBuilder sb = new StringBuilder();
-        sb.append("Failed: HTTP error code: ");
-        sb.append(response.getStatus());
-        sb.append(" ErrorMsg: ").append(response.getEntity(String.class));
+        sb.append("Failed: HTTP error code: ")
+            .append(response.getStatus())
+            .append(" ErrorMsg: ").append(response.getEntity(String.class));
         return sb.toString();
       }
       return response.getEntity(String.class);

+ 11 - 11
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java

@@ -1112,18 +1112,18 @@ public class ServiceScheduler extends CompositeService {
           } else {
             requestPath.append("http://");
           }
-          requestPath.append(bareHost);
-          requestPath.append(":");
-          requestPath.append(port);
-          requestPath.append("/ws/v1/node/yarn/sysfs/");
-          requestPath.append(UserGroupInformation.getCurrentUser()
-              .getShortUserName());
-          requestPath.append("/");
-          requestPath.append(yarnApp.getId());
+          requestPath.append(bareHost)
+              .append(":")
+              .append(port)
+              .append("/ws/v1/node/yarn/sysfs/")
+              .append(UserGroupInformation.getCurrentUser()
+                  .getShortUserName())
+              .append("/")
+              .append(yarnApp.getId());
           if (!useKerberos) {
-            requestPath.append("?user.name=");
-            requestPath.append(UserGroupInformation.getCurrentUser()
-                .getShortUserName());
+            requestPath.append("?user.name=")
+                .append(UserGroupInformation.getCurrentUser()
+                    .getShortUserName());
           }
           Builder builder = HttpUtil.connect(requestPath.toString());
           ClientResponse response = builder.put(ClientResponse.class, spec);

+ 5 - 6
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Artifact.java

@@ -145,12 +145,11 @@ public class Artifact implements Serializable {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class Artifact {\n");
-
-    sb.append("    id: ").append(toIndentedString(id)).append("\n");
-    sb.append("    type: ").append(toIndentedString(type)).append("\n");
-    sb.append("    uri: ").append(toIndentedString(uri)).append("\n");
-    sb.append("}");
+    sb.append("class Artifact {\n")
+        .append("    id: ").append(toIndentedString(id)).append("\n")
+        .append("    type: ").append(toIndentedString(type)).append("\n")
+        .append("    uri: ").append(toIndentedString(uri)).append("\n")
+        .append("}");
     return sb.toString();
   }
 

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/BaseResource.java

@@ -44,9 +44,9 @@ public class BaseResource implements Serializable {
   @Override
   public String toString() {
     StringBuilder builder = new StringBuilder();
-    builder.append("BaseResource [uri=");
-    builder.append(uri);
-    builder.append("]");
+    builder.append("BaseResource [uri=")
+        .append(uri)
+        .append("]");
     return builder.toString();
   }
 }

+ 27 - 25
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Component.java

@@ -474,31 +474,33 @@ public class Component implements Serializable {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class Component {\n");
-
-    sb.append("    name: ").append(toIndentedString(name)).append("\n");
-    sb.append("    state: ").append(toIndentedString(state)).append("\n");
-    sb.append("    dependencies: ").append(toIndentedString(dependencies))
-        .append("\n");
-    sb.append("    readinessCheck: ").append(toIndentedString(readinessCheck))
-        .append("\n");
-    sb.append("    artifact: ").append(toIndentedString(artifact)).append("\n");
-    sb.append("    launchCommand: ").append(toIndentedString(launchCommand))
-        .append("\n");
-    sb.append("    resource: ").append(toIndentedString(resource)).append("\n");
-    sb.append("    numberOfContainers: ")
-        .append(toIndentedString(numberOfContainers)).append("\n");
-    sb.append("    containers: ").append(toIndentedString(containers))
-        .append("\n");
-    sb.append("    runPrivilegedContainer: ")
-        .append(toIndentedString(runPrivilegedContainer)).append("\n");
-    sb.append("    placementPolicy: ").append(toIndentedString(placementPolicy))
-        .append("\n");
-    sb.append("    configuration: ").append(toIndentedString(configuration))
-        .append("\n");
-    sb.append("    quicklinks: ").append(toIndentedString(quicklinks))
-        .append("\n");
-    sb.append("}");
+    sb.append("class Component {\n")
+        .append("    name: ").append(toIndentedString(name)).append("\n")
+        .append("    state: ").append(toIndentedString(state)).append("\n")
+        .append("    dependencies: ").append(toIndentedString(dependencies))
+        .append("\n")
+        .append("    readinessCheck: ").append(toIndentedString(readinessCheck))
+        .append("\n")
+        .append("    artifact: ").append(toIndentedString(artifact))
+        .append("\n")
+        .append("    launchCommand: ").append(toIndentedString(launchCommand))
+        .append("\n")
+        .append("    resource: ").append(toIndentedString(resource))
+        .append("\n")
+        .append("    numberOfContainers: ")
+        .append(toIndentedString(numberOfContainers)).append("\n")
+        .append("    containers: ").append(toIndentedString(containers))
+        .append("\n")
+        .append("    runPrivilegedContainer: ")
+        .append(toIndentedString(runPrivilegedContainer)).append("\n")
+        .append("    placementPolicy: ")
+        .append(toIndentedString(placementPolicy))
+        .append("\n")
+        .append("    configuration: ").append(toIndentedString(configuration))
+        .append("\n")
+        .append("    quicklinks: ").append(toIndentedString(quicklinks))
+        .append("\n")
+        .append("}");
     return sb.toString();
   }
 

+ 9 - 7
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/ConfigFile.java

@@ -211,13 +211,15 @@ public class ConfigFile implements Serializable {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class ConfigFile {\n");
-
-    sb.append("    type: ").append(toIndentedString(type)).append("\n");
-    sb.append("    destFile: ").append(toIndentedString(destFile)).append("\n");
-    sb.append("    srcFile: ").append(toIndentedString(srcFile)).append("\n");
-    sb.append("    properties: ").append(toIndentedString(properties)).append("\n");
-    sb.append("}");
+    sb.append("class ConfigFile {\n")
+
+        .append("    type: ").append(toIndentedString(type)).append("\n")
+        .append("    destFile: ").append(toIndentedString(destFile))
+        .append("\n")
+        .append("    srcFile: ").append(toIndentedString(srcFile)).append("\n")
+        .append("    properties: ").append(toIndentedString(properties))
+        .append("\n")
+        .append("}");
     return sb.toString();
   }
 

+ 6 - 6
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Configuration.java

@@ -173,13 +173,13 @@ public class Configuration implements Serializable {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class Configuration {\n");
+    sb.append("class Configuration {\n")
 
-    sb.append("    properties: ").append(toIndentedString(properties))
-        .append("\n");
-    sb.append("    env: ").append(toIndentedString(env)).append("\n");
-    sb.append("    files: ").append(toIndentedString(files)).append("\n");
-    sb.append("}");
+        .append("    properties: ").append(toIndentedString(properties))
+        .append("\n")
+        .append("    env: ").append(toIndentedString(env)).append("\n")
+        .append("    files: ").append(toIndentedString(files)).append("\n")
+        .append("}");
     return sb.toString();
   }
 

+ 19 - 15
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Container.java

@@ -310,21 +310,25 @@ public class Container extends BaseResource {
     StringBuilder sb = new StringBuilder();
     sb.append("class Container {\n");
 
-    sb.append("    id: ").append(toIndentedString(id)).append("\n");
-    sb.append("    launchTime: ").append(toIndentedString(launchTime))
-        .append("\n");
-    sb.append("    ip: ").append(toIndentedString(ip)).append("\n");
-    sb.append("    hostname: ").append(toIndentedString(hostname)).append("\n");
-    sb.append("    bareHost: ").append(toIndentedString(bareHost)).append("\n");
-    sb.append("    state: ").append(toIndentedString(state)).append("\n");
-    sb.append("    componentInstanceName: ").append(toIndentedString(
-        componentInstanceName))
-        .append("\n");
-    sb.append("    resource: ").append(toIndentedString(resource)).append("\n");
-    sb.append("    artifact: ").append(toIndentedString(artifact)).append("\n");
-    sb.append("    privilegedContainer: ")
-        .append(toIndentedString(privilegedContainer)).append("\n");
-    sb.append("}");
+    sb.append("    id: ").append(toIndentedString(id)).append("\n")
+        .append("    launchTime: ").append(toIndentedString(launchTime))
+        .append("\n")
+        .append("    ip: ").append(toIndentedString(ip)).append("\n")
+        .append("    hostname: ").append(toIndentedString(hostname))
+        .append("\n")
+        .append("    bareHost: ").append(toIndentedString(bareHost))
+        .append("\n")
+        .append("    state: ").append(toIndentedString(state)).append("\n")
+        .append("    componentInstanceName: ").append(toIndentedString(
+            componentInstanceName))
+        .append("\n")
+        .append("    resource: ").append(toIndentedString(resource))
+        .append("\n")
+        .append("    artifact: ").append(toIndentedString(artifact))
+        .append("\n")
+        .append("    privilegedContainer: ")
+        .append(toIndentedString(privilegedContainer)).append("\n")
+        .append("}");
     return sb.toString();
   }
 

+ 5 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Error.java

@@ -106,12 +106,12 @@ public class Error {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class Error {\n");
+    sb.append("class Error {\n")
 
-    sb.append("    code: ").append(toIndentedString(code)).append("\n");
-    sb.append("    message: ").append(toIndentedString(message)).append("\n");
-    sb.append("    fields: ").append(toIndentedString(fields)).append("\n");
-    sb.append("}");
+        .append("    code: ").append(toIndentedString(code)).append("\n")
+        .append("    message: ").append(toIndentedString(message)).append("\n")
+        .append("    fields: ").append(toIndentedString(fields)).append("\n")
+        .append("}");
     return sb.toString();
   }
 

+ 5 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/KerberosPrincipal.java

@@ -120,12 +120,12 @@ public class KerberosPrincipal implements Serializable {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class KerberosPrincipal {\n");
+    sb.append("class KerberosPrincipal {\n")
 
-    sb.append("    principalName: ").append(toIndentedString(principalName))
-        .append("\n");
-    sb.append("    keytab: ").append(toIndentedString(keytab)).append("\n");
-    sb.append("}");
+        .append("    principalName: ").append(toIndentedString(principalName))
+        .append("\n")
+        .append("    keytab: ").append(toIndentedString(keytab)).append("\n")
+        .append("}");
     return sb.toString();
   }
 

+ 16 - 16
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/PlacementConstraint.java

@@ -248,22 +248,22 @@ public class PlacementConstraint implements Serializable {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class PlacementConstraint {\n");
-
-    sb.append("    name: ").append(toIndentedString(name)).append("\n");
-    sb.append("    type: ").append(toIndentedString(type)).append("\n");
-    sb.append("    scope: ").append(toIndentedString(scope)).append("\n");
-    sb.append("    targetTags: ").append(toIndentedString(targetTags))
-        .append("\n");
-    sb.append("    nodeAttributes: ").append(toIndentedString(nodeAttributes))
-        .append("\n");
-    sb.append("    nodePartitions: ").append(toIndentedString(nodePartitions))
-    .append("\n");
-    sb.append("    minCardinality: ").append(toIndentedString(minCardinality))
-        .append("\n");
-    sb.append("    maxCardinality: ").append(toIndentedString(maxCardinality))
-        .append("\n");
-    sb.append("}");
+    sb.append("class PlacementConstraint {\n")
+
+        .append("    name: ").append(toIndentedString(name)).append("\n")
+        .append("    type: ").append(toIndentedString(type)).append("\n")
+        .append("    scope: ").append(toIndentedString(scope)).append("\n")
+        .append("    targetTags: ").append(toIndentedString(targetTags))
+        .append("\n")
+        .append("    nodeAttributes: ").append(toIndentedString(nodeAttributes))
+        .append("\n")
+        .append("    nodePartitions: ").append(toIndentedString(nodePartitions))
+        .append("\n")
+        .append("    minCardinality: ").append(toIndentedString(minCardinality))
+        .append("\n")
+        .append("    maxCardinality: ").append(toIndentedString(maxCardinality))
+        .append("\n")
+        .append("}");
     return sb.toString();
   }
 

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/PlacementPolicy.java

@@ -80,11 +80,11 @@ public class PlacementPolicy implements Serializable {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class PlacementPolicy {\n");
+    sb.append("class PlacementPolicy {\n")
 
-    sb.append("    constraints: ").append(toIndentedString(constraints))
-        .append("\n");
-    sb.append("}");
+        .append("    constraints: ").append(toIndentedString(constraints))
+        .append("\n")
+        .append("}");
     return sb.toString();
   }
 

+ 8 - 6
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/ReadinessCheck.java

@@ -173,12 +173,14 @@ public class ReadinessCheck implements Serializable {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class ReadinessCheck {\n");
-
-    sb.append("    type: ").append(toIndentedString(type)).append("\n");
-    sb.append("    properties: ").append(toIndentedString(properties)).append("\n");
-    sb.append("    artifact: ").append(toIndentedString(artifact)).append("\n");
-    sb.append("}");
+    sb.append("class ReadinessCheck {\n")
+
+        .append("    type: ").append(toIndentedString(type)).append("\n")
+        .append("    properties: ").append(toIndentedString(properties))
+        .append("\n")
+        .append("    artifact: ").append(toIndentedString(artifact))
+        .append("\n")
+        .append("}");
     return sb.toString();
   }
 

+ 8 - 8
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Resource.java

@@ -162,14 +162,14 @@ public class Resource extends BaseResource implements Cloneable {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class Resource {\n");
-
-    sb.append("    profile: ").append(toIndentedString(profile)).append("\n");
-    sb.append("    cpus: ").append(toIndentedString(cpus)).append("\n");
-    sb.append("    memory: ").append(toIndentedString(memory)).append("\n");
-    sb.append("    additional: ").append(
-        toIndentedString(additional)).append("\n");
-    sb.append("}");
+    sb.append("class Resource {\n")
+
+        .append("    profile: ").append(toIndentedString(profile)).append("\n")
+        .append("    cpus: ").append(toIndentedString(cpus)).append("\n")
+        .append("    memory: ").append(toIndentedString(memory)).append("\n")
+        .append("    additional: ").append(
+            toIndentedString(additional)).append("\n")
+        .append("}");
     return sb.toString();
   }
 

+ 7 - 7
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/ResourceInformation.java

@@ -127,13 +127,13 @@ public class ResourceInformation {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class ResourceInformation {\n");
-    sb.append("    value: ").append(toIndentedString(value)).append("\n");
-    sb.append("    unit: ").append(toIndentedString(unit)).append("\n");
-    sb.append("    attributes: ").append(toIndentedString(attributes))
-        .append("\n");
-    sb.append("    tags: ").append(toIndentedString(tags)).append("\n");
-    sb.append("}");
+    sb.append("class ResourceInformation {\n")
+        .append("    value: ").append(toIndentedString(value)).append("\n")
+        .append("    unit: ").append(toIndentedString(unit)).append("\n")
+        .append("    attributes: ").append(toIndentedString(attributes))
+        .append("\n")
+        .append("    tags: ").append(toIndentedString(tags)).append("\n")
+        .append("}");
     return sb.toString();
   }
 

+ 32 - 29
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Service.java

@@ -424,35 +424,38 @@ public class Service extends BaseResource {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class Service {\n");
-
-    sb.append("    name: ").append(toIndentedString(name)).append("\n");
-    sb.append("    id: ").append(toIndentedString(id)).append("\n");
-    sb.append("    version: ").append(toIndentedString(version)).append("\n");
-    sb.append("    description: ").append(toIndentedString(description))
-        .append("\n");
-    sb.append("    artifact: ").append(toIndentedString(artifact)).append("\n");
-    sb.append("    resource: ").append(toIndentedString(resource)).append("\n");
-    sb.append("    launchTime: ").append(toIndentedString(launchTime))
-        .append("\n");
-    sb.append("    numberOfRunningContainers: ")
-        .append(toIndentedString(numberOfRunningContainers)).append("\n");
-    sb.append("    lifetime: ").append(toIndentedString(lifetime)).append("\n");
-    sb.append("    components: ").append(toIndentedString(components))
-        .append("\n");
-    sb.append("    configuration: ").append(toIndentedString(configuration))
-        .append("\n");
-    sb.append("    state: ").append(toIndentedString(state)).append("\n");
-    sb.append("    quicklinks: ").append(toIndentedString(quicklinks))
-        .append("\n");
-    sb.append("    queue: ").append(toIndentedString(queue)).append("\n");
-    sb.append("    kerberosPrincipal: ")
-        .append(toIndentedString(kerberosPrincipal)).append("\n");
-    sb.append("    dockerClientConfig: ")
-        .append(toIndentedString(dockerClientConfig)).append("\n");
-    sb.append("    dependencies: ")
-        .append(toIndentedString(dependencies)).append("\n");
-    sb.append("}");
+    sb.append("class Service {\n")
+
+        .append("    name: ").append(toIndentedString(name)).append("\n")
+        .append("    id: ").append(toIndentedString(id)).append("\n")
+        .append("    version: ").append(toIndentedString(version)).append("\n")
+        .append("    description: ").append(toIndentedString(description))
+        .append("\n")
+        .append("    artifact: ").append(toIndentedString(artifact))
+        .append("\n")
+        .append("    resource: ").append(toIndentedString(resource))
+        .append("\n")
+        .append("    launchTime: ").append(toIndentedString(launchTime))
+        .append("\n")
+        .append("    numberOfRunningContainers: ")
+        .append(toIndentedString(numberOfRunningContainers)).append("\n")
+        .append("    lifetime: ").append(toIndentedString(lifetime))
+        .append("\n")
+        .append("    components: ").append(toIndentedString(components))
+        .append("\n")
+        .append("    configuration: ").append(toIndentedString(configuration))
+        .append("\n")
+        .append("    state: ").append(toIndentedString(state)).append("\n")
+        .append("    quicklinks: ").append(toIndentedString(quicklinks))
+        .append("\n")
+        .append("    queue: ").append(toIndentedString(queue)).append("\n")
+        .append("    kerberosPrincipal: ")
+        .append(toIndentedString(kerberosPrincipal)).append("\n")
+        .append("    dockerClientConfig: ")
+        .append(toIndentedString(dockerClientConfig)).append("\n")
+        .append("    dependencies: ")
+        .append(toIndentedString(dependencies)).append("\n")
+        .append("}");
     return sb.toString();
   }
 

+ 6 - 6
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/ServiceStatus.java

@@ -124,13 +124,13 @@ public class ServiceStatus extends BaseResource {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class ServiceStatus {\n");
+    sb.append("class ServiceStatus {\n")
 
-    sb.append("    diagnostics: ").append(toIndentedString(diagnostics))
-        .append("\n");
-    sb.append("    state: ").append(toIndentedString(state)).append("\n");
-    sb.append("    code: ").append(toIndentedString(code)).append("\n");
-    sb.append("}");
+        .append("    diagnostics: ").append(toIndentedString(diagnostics))
+        .append("\n")
+        .append("    state: ").append(toIndentedString(state)).append("\n")
+        .append("    code: ").append(toIndentedString(code)).append("\n")
+        .append("}");
     return sb.toString();
   }
 

+ 12 - 12
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java

@@ -351,18 +351,18 @@ public class ComponentInstance implements EventHandler<ComponentInstanceEvent>,
       comp.reInsertPendingInstance(compInstance);
 
       StringBuilder builder = new StringBuilder();
-      builder.append(compInstance.getCompInstanceId()).append(": ");
-      builder.append(event.getContainerId()).append(
-          " completed. Reinsert back to pending list and requested ");
-      builder.append("a new container.").append(System.lineSeparator());
-      builder.append(" exitStatus=").append(
-          failureBeforeLaunch || event.getStatus() == null ? null :
-              event.getStatus().getExitStatus());
-      builder.append(", diagnostics=");
-      builder.append(failureBeforeLaunch ?
-          FAILED_BEFORE_LAUNCH_DIAG :
-          (event.getStatus() != null ? event.getStatus().getDiagnostics() :
-              UPGRADE_FAILED));
+      builder.append(compInstance.getCompInstanceId()).append(": ")
+          .append(event.getContainerId()).append(
+              " completed. Reinsert back to pending list and requested ")
+          .append("a new container.").append(System.lineSeparator())
+          .append(" exitStatus=").append(
+              failureBeforeLaunch || event.getStatus() == null ? null :
+                  event.getStatus().getExitStatus())
+          .append(", diagnostics=")
+          .append(failureBeforeLaunch ?
+              FAILED_BEFORE_LAUNCH_DIAG :
+              (event.getStatus() != null ? event.getStatus().getDiagnostics() :
+                  UPGRADE_FAILED));
 
       if (event.getStatus() != null && event.getStatus().getExitStatus() != 0) {
         LOG.error(builder.toString());

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/AbstractLauncher.java

@@ -166,8 +166,8 @@ public class AbstractLauncher {
           if (sb.length() > 0) {
             sb.append(",");
           }
-          sb.append(mount.getKey()).append(":");
-          sb.append(mount.getValue()).append(":ro");
+          sb.append(mount.getKey()).append(":")
+              .append(mount.getValue()).append(":ro");
         }
         env.put(ENV_DOCKER_CONTAINER_MOUNTS, sb.toString());
       }

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/CoreFileSystem.java

@@ -97,8 +97,8 @@ public class CoreFileSystem {
   public String toString() {
     final StringBuilder sb =
       new StringBuilder("CoreFileSystem{");
-    sb.append("fileSystem=").append(fileSystem.getUri());
-    sb.append('}');
+    sb.append("fileSystem=").append(fileSystem.getUri())
+        .append('}');
     return sb.toString();
   }
 

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java

@@ -178,9 +178,9 @@ public class PublishedConfiguration {
   public String toString() {
     final StringBuilder sb =
         new StringBuilder("PublishedConfiguration{");
-    sb.append("description='").append(description).append('\'');
-    sb.append(" entries = ").append(entries.size());
-    sb.append('}');
+    sb.append("description='").append(description).append('\'')
+        .append(" entries = ").append(entries.size())
+        .append('}');
     return sb.toString();
   }
 

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/ServiceUtils.java

@@ -213,8 +213,8 @@ public final class ServiceUtils {
       return trailing ? separator : "";
     }
     for (Object o : collection) {
-      b.append(o);
-      b.append(separator);
+      b.append(o)
+          .append(separator);
     }
     int length = separator.length();
     String s = b.toString();

+ 7 - 6
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java

@@ -427,12 +427,13 @@ public abstract class AMRMClient<T extends AMRMClient.ContainerRequest> extends
 
     public String toString() {
       StringBuilder sb = new StringBuilder();
-      sb.append("Capability[").append(capability).append("]");
-      sb.append("Priority[").append(priority).append("]");
-      sb.append("AllocationRequestId[").append(allocationRequestId).append("]");
-      sb.append("ExecutionTypeRequest[").append(executionTypeRequest)
-          .append("]");
-      sb.append("Resource Profile[").append(resourceProfile).append("]");
+      sb.append("Capability[").append(capability).append("]")
+          .append("Priority[").append(priority).append("]")
+          .append("AllocationRequestId[").append(allocationRequestId)
+          .append("]")
+          .append("ExecutionTypeRequest[").append(executionTypeRequest)
+          .append("]")
+          .append("Resource Profile[").append(resourceProfile).append("]");
       return sb.toString();
     }
 

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java

@@ -1245,8 +1245,8 @@ public class ApplicationCLI extends YarnCLI {
 
   private String getAllValidApplicationStates() {
     StringBuilder sb = new StringBuilder();
-    sb.append("The valid application state can be" + " one of the following: ");
-    sb.append(ALLSTATES_OPTION + ",");
+    sb.append("The valid application state can be" + " one of the following: ")
+        .append(ALLSTATES_OPTION + ",");
     for (YarnApplicationState appState : YarnApplicationState.values()) {
       sb.append(appState + ",");
     }

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java

@@ -292,8 +292,8 @@ public class RMAdminCLI extends HAAdmin {
     if (isHAEnabled) {
       appendHAUsage(summary);
     }
-    summary.append(" [-help [cmd]]");
-    summary.append("\n");
+    summary.append(" [-help [cmd]]")
+        .append("\n");
 
     StringBuilder helpBuilder = new StringBuilder();
     System.out.println(summary);
@@ -513,8 +513,8 @@ public class RMAdminCLI extends HAAdmin {
       StringBuilder sb = new StringBuilder();
       sb.append(username + " :");
       for (String group : adminProtocol.getGroupsForUser(username)) {
-        sb.append(" ");
-        sb.append(group);
+        sb.append(" ")
+            .append(group);
       }
       System.out.println(sb);
     }

+ 41 - 37
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/TopCLI.java

@@ -871,43 +871,47 @@ public class TopCLI extends YarnCLI {
     String currentTime = DateFormatUtils.ISO_8601_EXTENDED_TIME_FORMAT
         .format(now);
 
-    ret.append(CLEAR_LINE);
-    ret.append(limitLineLength(String.format(
-      "YARN top - %s, up %s, %d active users, queue(s): %s%n", currentTime,
-      uptimeStr, queueMetrics.activeUsers, queue), terminalWidth, true));
-
-    ret.append(CLEAR_LINE);
-    ret.append(limitLineLength(String.format(
-      "NodeManager(s): %d total, %d active, %d unhealthy, %d decommissioned,"
-          + " %d lost, %d rebooted%n", nodes.totalNodes, nodes.runningNodes,
-      nodes.unhealthyNodes, nodes.decommissionedNodes, nodes.lostNodes,
-      nodes.rebootedNodes), terminalWidth, true));
-
-    ret.append(CLEAR_LINE);
-    ret.append(limitLineLength(String.format(
-        "Queue(s) Applications: %d running, %d submitted, %d pending,"
-            + " %d completed, %d killed, %d failed%n", queueMetrics.appsRunning,
-        queueMetrics.appsSubmitted, queueMetrics.appsPending,
-        queueMetrics.appsCompleted, queueMetrics.appsKilled,
-        queueMetrics.appsFailed), terminalWidth, true));
-
-    ret.append(CLEAR_LINE);
-    ret.append(limitLineLength(String.format("Queue(s) Mem(GB): %d available,"
-        + " %d allocated, %d pending, %d reserved%n",
-      queueMetrics.availableMemoryGB, queueMetrics.allocatedMemoryGB,
-      queueMetrics.pendingMemoryGB, queueMetrics.reservedMemoryGB),
-      terminalWidth, true));
-
-    ret.append(CLEAR_LINE);
-    ret.append(limitLineLength(String.format("Queue(s) VCores: %d available,"
-        + " %d allocated, %d pending, %d reserved%n",
-      queueMetrics.availableVCores, queueMetrics.allocatedVCores,
-      queueMetrics.pendingVCores, queueMetrics.reservedVCores), terminalWidth,
-      true));
-
-    ret.append(CLEAR_LINE);
-    ret.append(limitLineLength(String.format(
-        "Queue(s) Containers: %d allocated, %d pending, %d reserved%n",
+    ret.append(CLEAR_LINE)
+        .append(limitLineLength(String.format(
+            "YARN top - %s, up %s, %d active users, queue(s): %s%n",
+            currentTime, uptimeStr, queueMetrics.activeUsers, queue),
+            terminalWidth, true));
+
+    ret.append(CLEAR_LINE)
+        .append(limitLineLength(String.format(
+            "NodeManager(s)"
+                + ": %d total, %d active, %d unhealthy, %d decommissioned,"
+                + " %d lost, %d rebooted%n",
+            nodes.totalNodes, nodes.runningNodes, nodes.unhealthyNodes,
+            nodes.decommissionedNodes, nodes.lostNodes,
+            nodes.rebootedNodes), terminalWidth, true));
+
+    ret.append(CLEAR_LINE)
+        .append(limitLineLength(String.format(
+            "Queue(s) Applications: %d running, %d submitted, %d pending,"
+                + " %d completed, %d killed, %d failed%n",
+            queueMetrics.appsRunning, queueMetrics.appsSubmitted,
+            queueMetrics.appsPending, queueMetrics.appsCompleted,
+            queueMetrics.appsKilled, queueMetrics.appsFailed), terminalWidth,
+            true));
+
+    ret.append(CLEAR_LINE)
+        .append(limitLineLength(String.format("Queue(s) Mem(GB): %d available,"
+            + " %d allocated, %d pending, %d reserved%n",
+            queueMetrics.availableMemoryGB, queueMetrics.allocatedMemoryGB,
+            queueMetrics.pendingMemoryGB, queueMetrics.reservedMemoryGB),
+            terminalWidth, true));
+
+    ret.append(CLEAR_LINE)
+        .append(limitLineLength(String.format("Queue(s) VCores: %d available,"
+            + " %d allocated, %d pending, %d reserved%n",
+            queueMetrics.availableVCores, queueMetrics.allocatedVCores,
+            queueMetrics.pendingVCores, queueMetrics.reservedVCores),
+            terminalWidth, true));
+
+    ret.append(CLEAR_LINE)
+        .append(limitLineLength(String.format(
+            "Queue(s) Containers: %d allocated, %d pending, %d reserved%n",
             queueMetrics.allocatedContainers, queueMetrics.pendingContainers,
             queueMetrics.reservedContainers), terminalWidth, true));
     return ret.toString();

+ 12 - 12
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerPBImpl.java

@@ -404,18 +404,18 @@ public class ContainerPBImpl extends Container {
 
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("Container: [");
-    sb.append("ContainerId: ").append(getId()).append(", ");
-    sb.append("AllocationRequestId: ").append(getAllocationRequestId())
-        .append(", ");
-    sb.append("Version: ").append(getVersion()).append(", ");
-    sb.append("NodeId: ").append(getNodeId()).append(", ");
-    sb.append("NodeHttpAddress: ").append(getNodeHttpAddress()).append(", ");
-    sb.append("Resource: ").append(getResource()).append(", ");
-    sb.append("Priority: ").append(getPriority()).append(", ");
-    sb.append("Token: ").append(getContainerToken()).append(", ");
-    sb.append("ExecutionType: ").append(getExecutionType()).append(", ");
-    sb.append("]");
+    sb.append("Container: [")
+        .append("ContainerId: ").append(getId()).append(", ")
+        .append("AllocationRequestId: ").append(getAllocationRequestId())
+        .append(", ")
+        .append("Version: ").append(getVersion()).append(", ")
+        .append("NodeId: ").append(getNodeId()).append(", ")
+        .append("NodeHttpAddress: ").append(getNodeHttpAddress()).append(", ")
+        .append("Resource: ").append(getResource()).append(", ")
+        .append("Priority: ").append(getPriority()).append(", ")
+        .append("Token: ").append(getContainerToken()).append(", ")
+        .append("ExecutionType: ").append(getExecutionType()).append(", ")
+        .append("]");
     return sb.toString();
   }
 

+ 12 - 12
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerStatusPBImpl.java

@@ -90,18 +90,18 @@ public class ContainerStatusPBImpl extends ContainerStatus {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("ContainerStatus: [");
-    sb.append("ContainerId: ").append(getContainerId()).append(", ");
-    sb.append("ExecutionType: ").append(getExecutionType()).append(", ");
-    sb.append("State: ").append(getState()).append(", ");
-    sb.append("Capability: ").append(getCapability()).append(", ");
-    sb.append("Diagnostics: ").append(getDiagnostics()).append(", ");
-    sb.append("ExitStatus: ").append(getExitStatus()).append(", ");
-    sb.append("IP: ").append(getIPs()).append(", ");
-    sb.append("Host: ").append(getHost()).append(", ");
-    sb.append("ExposedPorts: ").append(getExposedPorts()).append(", ");
-    sb.append("ContainerSubState: ").append(getContainerSubState());
-    sb.append("]");
+    sb.append("ContainerStatus: [")
+        .append("ContainerId: ").append(getContainerId()).append(", ")
+        .append("ExecutionType: ").append(getExecutionType()).append(", ")
+        .append("State: ").append(getState()).append(", ")
+        .append("Capability: ").append(getCapability()).append(", ")
+        .append("Diagnostics: ").append(getDiagnostics()).append(", ")
+        .append("ExitStatus: ").append(getExitStatus()).append(", ")
+        .append("IP: ").append(getIPs()).append(", ")
+        .append("Host: ").append(getHost()).append(", ")
+        .append("ExposedPorts: ").append(getExposedPorts()).append(", ")
+        .append("ContainerSubState: ").append(getContainerSubState())
+        .append("]");
     return sb.toString();
   }
 

+ 6 - 6
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/LocalizationStatusPBImpl.java

@@ -77,12 +77,12 @@ public class LocalizationStatusPBImpl extends LocalizationStatus {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("LocalizationStatus: [");
-    sb.append("ResourceKey: ").append(getResourceKey()).append(", ");
-    sb.append("LocalizationState: ").append(getLocalizationState())
-        .append(", ");
-    sb.append("Diagnostics: ").append(getDiagnostics()).append(", ");
-    sb.append("]");
+    sb.append("LocalizationStatus: [")
+        .append("ResourceKey: ").append(getResourceKey()).append(", ")
+        .append("LocalizationState: ").append(getLocalizationState())
+        .append(", ")
+        .append("Diagnostics: ").append(getDiagnostics()).append(", ")
+        .append("]");
     return sb.toString();
   }
 

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/TokenPBImpl.java

@@ -184,9 +184,9 @@ public class TokenPBImpl extends Token {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("Token { ");
-    sb.append("kind: ").append(getKind()).append(", ");
-    sb.append("service: ").append(getService()).append(" }");
+    sb.append("Token { ")
+        .append("kind: ").append(getKind()).append(", ")
+        .append("service: ").append(getService()).append(" }");
     return sb.toString();
   }
 }  

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java

@@ -276,9 +276,9 @@ public class TimelineV2ClientImpl extends TimelineV2Client {
     } else {
       StringBuilder msg =
           new StringBuilder("TimelineClient has reached to max retry times : ");
-      msg.append(this.maxServiceRetries);
-      msg.append(" for service address: ");
-      msg.append(timelineServiceAddress);
+      msg.append(this.maxServiceRetries)
+          .append(" for service address: ")
+          .append(timelineServiceAddress);
       LOG.error(msg.toString());
       throw new IOException(msg.toString(), e);
     }

+ 14 - 14
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogToolUtils.java

@@ -89,13 +89,13 @@ public final class LogToolUtils {
       String containerStr = String.format(
           LogToolUtils.CONTAINER_ON_NODE_PATTERN,
           containerId, nodeId);
-      sb.append(containerStr + "\n");
-      sb.append("LogAggregationType: " + logType + "\n");
-      sb.append(StringUtils.repeat("=", containerStr.length()) + "\n");
-      sb.append("LogType:" + fileName + "\n");
-      sb.append("LogLastModifiedTime:" + lastModifiedTime + "\n");
-      sb.append("LogLength:" + Long.toString(fileLength) + "\n");
-      sb.append("LogContents:\n");
+      sb.append(containerStr + "\n")
+          .append("LogAggregationType: " + logType + "\n")
+          .append(StringUtils.repeat("=", containerStr.length()) + "\n")
+          .append("LogType:" + fileName + "\n")
+          .append("LogLastModifiedTime:" + lastModifiedTime + "\n")
+          .append("LogLength:" + Long.toString(fileLength) + "\n")
+          .append("LogContents:\n");
       byte[] b = sb.toString().getBytes(
           Charset.forName("UTF-8"));
       os.write(b, 0, b.length);
@@ -138,13 +138,13 @@ public final class LogToolUtils {
       String containerStr = String.format(
           LogToolUtils.CONTAINER_ON_NODE_PATTERN,
           containerId, nodeId);
-      sb.append(containerStr + "\n");
-      sb.append("LogAggregationType: " + logType + "\n");
-      sb.append(StringUtils.repeat("=", containerStr.length()) + "\n");
-      sb.append("LogType:" + fileName + "\n");
-      sb.append("LogLastModifiedTime:" + lastModifiedTime + "\n");
-      sb.append("LogLength:" + Long.toString(fileLength) + "\n");
-      sb.append("LogContents:\n");
+      sb.append(containerStr + "\n")
+          .append("LogAggregationType: " + logType + "\n")
+          .append(StringUtils.repeat("=", containerStr.length()) + "\n")
+          .append("LogType:" + fileName + "\n")
+          .append("LogLastModifiedTime:" + lastModifiedTime + "\n")
+          .append("LogLength:" + Long.toString(fileLength) + "\n")
+          .append("LogContents:\n");
       byte[] b = sb.toString().getBytes(
           Charset.forName("UTF-8"));
       os.write(b, 0, b.length);
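
Editorial aside: note that many of the chained calls in this hunk still pass concatenated arguments (for example "LogType:" + fileName + "\n"), so each argument is built as an intermediate String before append() runs; the chaining only removes the repeated references to the builder. If those intermediate allocations ever mattered, the concatenation itself could be split into further appends. A hedged sketch of that shape, mirroring the LogToolUtils header but not taken from the patch:

    final class LogHeaderSketch {
      // Illustrative only; parameter names are placeholders.
      static String header(String containerStr, String logType, String fileName,
          String lastModifiedTime, long fileLength) {
        StringBuilder sb = new StringBuilder();
        // Each value is appended directly, so no intermediate concatenated Strings are created.
        sb.append(containerStr).append('\n')
            .append("LogAggregationType: ").append(logType).append('\n')
            .append("LogType:").append(fileName).append('\n')
            .append("LogLastModifiedTime:").append(lastModifiedTime).append('\n')
            .append("LogLength:").append(fileLength).append('\n')
            .append("LogContents:\n");
        return sb.toString();
      }
    }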

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/LogAggregationFileController.java

@@ -531,9 +531,9 @@ public abstract class LogAggregationFileController {
   protected String aggregatedLogSuffix(String fileName) {
     StringBuilder sb = new StringBuilder();
     String endOfFile = "End of LogType:" + fileName;
-    sb.append("\n" + endOfFile + "\n");
-    sb.append(StringUtils.repeat("*", endOfFile.length() + 50)
-        + "\n\n");
+    sb.append("\n" + endOfFile + "\n")
+        .append(StringUtils.repeat("*", endOfFile.length() + 50)
+            + "\n\n");
     return sb.toString();
   }
 

+ 8 - 7
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/Graph.java

@@ -150,18 +150,19 @@ public class Graph {
   public String generateGraphViz(String indent) {
     StringBuilder sb = new StringBuilder();
     if (this.parent == null) {
-      sb.append("digraph " + name + " {\n");
-      sb.append(String.format("graph [ label=%s, fontsize=24, fontname=Helvetica];%n",
-          wrapSafeString(name)));
-      sb.append("node [fontsize=12, fontname=Helvetica];\n");
-      sb.append("edge [fontsize=9, fontcolor=blue, fontname=Arial];\n");
+      sb.append("digraph " + name + " {\n")
+          .append(String.format(
+              "graph [ label=%s, fontsize=24, fontname=Helvetica];%n",
+              wrapSafeString(name)))
+          .append("node [fontsize=12, fontname=Helvetica];\n")
+          .append("edge [fontsize=9, fontcolor=blue, fontname=Arial];\n");
     } else {
       sb.append("subgraph cluster_" + name + " {\nlabel=\"" + name + "\"\n");
     }
     for (Graph g : subgraphs) {
       String ginfo = g.generateGraphViz(indent+"  ");
-      sb.append(ginfo);
-      sb.append("\n");
+      sb.append(ginfo)
+          .append("\n");
     }
     for (Node n : nodes) {
       sb.append(String.format(

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java

@@ -979,9 +979,9 @@ public class ProcfsBasedProcessTree extends ResourceCalculatorProcessTree {
 
     public String toString() {
       StringBuilder sb = new StringBuilder();
-      sb.append("\t").append(this.getName()).append("\n");
-      sb.append("\t").append(MemInfo.SIZE.name + ":" + this.getSize())
-        .append(" kB\n");
+      sb.append("\t").append(this.getName()).append("\n")
+          .append("\t").append(MemInfo.SIZE.name + ":" + this.getSize())
+          .append(" kB\n");
       sb.append("\t").append(MemInfo.PSS.name + ":" + this.getPss())
         .append(" kB\n");
       sb.append("\t").append(MemInfo.RSS.name + ":" + this.getRss())

+ 6 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSErrorsAndWarningsPage.java

@@ -47,11 +47,12 @@ public class AHSErrorsAndWarningsPage extends AHSView {
 
   private String tablesInit() {
     StringBuilder b = tableInit().append(", aoColumnDefs: [");
-    b.append("{'sType': 'string', 'aTargets': [ 0 ]}");
-    b.append(", {'sType': 'string', 'bSearchable': true, 'aTargets': [ 1 ]}");
-    b.append(", {'sType': 'numeric', 'bSearchable': false, 'aTargets': [ 2 ]}");
-    b.append(", {'sType': 'date', 'aTargets': [ 3 ] }]");
-    b.append(", aaSorting: [[3, 'desc']]}");
+    b.append("{'sType': 'string', 'aTargets': [ 0 ]}")
+        .append(", {'sType': 'string', 'bSearchable': true, 'aTargets': [ 1 ]}")
+        .append(
+            ", {'sType': 'numeric', 'bSearchable': false, 'aTargets': [ 2 ]}")
+        .append(", {'sType': 'date', 'aTargets': [ 3 ] }]")
+        .append(", aaSorting: [[3, 'desc']]}");
     return b.toString();
   }
 }

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/records/SubClusterPolicyConfiguration.java

@@ -154,9 +154,9 @@ public abstract class SubClusterPolicyConfiguration {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append(getType());
-    sb.append(" : ");
-    sb.append(getParams());
+    sb.append(getType())
+        .append(" : ")
+        .append(getParams());
     return sb.toString();
   }
 }

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/utils/FederationStateStoreFacade.java

@@ -516,8 +516,8 @@ public final class FederationStateStoreFacade {
   protected String buildCacheKey(String typeName, String methodName,
       String argName) {
     StringBuilder buffer = new StringBuilder();
-    buffer.append(typeName).append(".");
-    buffer.append(methodName);
+    buffer.append(typeName).append(".")
+        .append(methodName);
     if (argName != null) {
       buffer.append("::");
       buffer.append(argName);

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/sharedcache/SharedCacheUtil.java

@@ -71,8 +71,8 @@ public class SharedCacheUtil {
     // SHARED_CACHE_ROOT/3/c/4/3c4f
     StringBuilder sb = new StringBuilder(cacheRoot);
     for (int i = 0; i < cacheDepth; i++) {
-      sb.append(Path.SEPARATOR_CHAR);
-      sb.append(checksum.charAt(i));
+      sb.append(Path.SEPARATOR_CHAR)
+          .append(checksum.charAt(i));
     }
     sb.append(Path.SEPARATOR_CHAR).append(checksum);
 

+ 7 - 7
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java

@@ -331,17 +331,17 @@ public class DefaultContainerExecutor extends ContainerExecutor {
             + containerId + " and exit code: " + exitCode , e);
 
         StringBuilder builder = new StringBuilder();
-        builder.append("Exception from container-launch.\n");
-        builder.append("Container id: ").append(containerId).append("\n");
-        builder.append("Exit code: ").append(exitCode).append("\n");
+        builder.append("Exception from container-launch.\n")
+            .append("Container id: ").append(containerId).append("\n")
+            .append("Exit code: ").append(exitCode).append("\n");
         if (!Optional.fromNullable(e.getMessage()).or("").isEmpty()) {
-          builder.append("Exception message: ");
-          builder.append(e.getMessage()).append("\n");
+          builder.append("Exception message: ")
+              .append(e.getMessage()).append("\n");
         }
 
         if (!shExec.getOutput().isEmpty()) {
-          builder.append("Shell output: ");
-          builder.append(shExec.getOutput()).append("\n");
+          builder.append("Shell output: ")
+              .append(shExec.getOutput()).append("\n");
         }
         String diagnostics = builder.toString();
         logOutput(diagnostics);

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java

@@ -597,10 +597,10 @@ public class LinuxContainerExecutor extends ContainerExecutor {
           + containerId + " and exit code: " + exitCode, e);
 
       StringBuilder builder = new StringBuilder();
-      builder.append("Exception from container-launch.\n");
-      builder.append("Container id: " + containerId + "\n");
-      builder.append("Exit code: " + exitCode + "\n");
-      builder.append("Exception message: " + e.getMessage() + "\n");
+      builder.append("Exception from container-launch.\n")
+          .append("Container id: " + containerId + "\n")
+          .append("Exit code: " + exitCode + "\n")
+          .append("Exception message: " + e.getMessage() + "\n");
       if (!Optional.fromNullable(e.getErrorOutput()).or("").isEmpty()) {
         builder.append("Shell error output: " + e.getErrorOutput() + "\n");
       }

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java

@@ -779,8 +779,8 @@ public class ClientRMService extends AbstractService implements
     String diagnostics = org.apache.commons.lang3.StringUtils
         .trimToNull(request.getDiagnostics());
     if (diagnostics != null) {
-      message.append(" with diagnostic message: ");
-      message.append(diagnostics);
+      message.append(" with diagnostic message: ")
+          .append(diagnostics);
     }
 
     this.rmContext.getDispatcher().getEventHandler()

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java

@@ -352,8 +352,8 @@ public class AMLauncher implements Runnable {
         LOG.info("Error cleaning master ", ie);
       } catch (YarnException e) {
         StringBuilder sb = new StringBuilder("Container ");
-        sb.append(masterContainer.getId().toString());
-        sb.append(" is not handled by this NodeManager");
+        sb.append(masterContainer.getId().toString())
+            .append(" is not handled by this NodeManager");
         if (!e.getMessage().contains(sb.toString())) {
           // Ignoring if container is already killed by Node Manager.
           LOG.info("Error cleaning master ", e);

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/invariants/MetricsInvariantChecker.java

@@ -127,9 +127,9 @@ public class MetricsInvariantChecker extends InvariantsChecker {
         }
 
         invariants.put(inv, scriptEngine.compile(inv));
-        sb.append(" (");
-        sb.append(inv);
-        sb.append(") ");
+        sb.append(" (")
+            .append(inv)
+            .append(") ");
       }
 
       // create a single large combined invariant for speed of checking

+ 5 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/NodeAttributesManagerImpl.java

@@ -194,11 +194,11 @@ public class NodeAttributesManagerImpl extends NodeAttributesManager {
         default:
           break;
         }
-        logMsg.append(" NM = ");
-        logMsg.append(entry.getKey());
-        logMsg.append(", attributes=[ ");
-        logMsg.append(StringUtils.join(entry.getValue().keySet(), ","));
-        logMsg.append("] ,");
+        logMsg.append(" NM = ")
+            .append(entry.getKey())
+            .append(", attributes=[ ")
+            .append(StringUtils.join(entry.getValue().keySet(), ","))
+            .append("] ,");
       }
 
       if (LOG.isDebugEnabled()) {

+ 13 - 12
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/AbstractResourceUsage.java

@@ -56,15 +56,15 @@ public class AbstractResourceUsage {
   }
 
   /**
-   * Use enum here to make implementation more cleaner and readable.
-   * Indicates array index for each resource usage type.
+   * Use enum here to make implementation more cleaner and readable. Indicates
+   * array index for each resource usage type.
    */
   public enum ResourceType {
     // CACHED_USED and CACHED_PENDING may be read by anyone, but must only
     // be written by ordering policies
     USED(0), PENDING(1), AMUSED(2), RESERVED(3), CACHED_USED(4), CACHED_PENDING(
-        5), AMLIMIT(6), MIN_RESOURCE(7), MAX_RESOURCE(8), EFF_MIN_RESOURCE(
-            9), EFF_MAX_RESOURCE(10), USERAMLIMIT(11);
+        5), AMLIMIT(6), MIN_RESOURCE(7), MAX_RESOURCE(
+            8), EFF_MIN_RESOURCE(9), EFF_MAX_RESOURCE(10), USERAMLIMIT(11);
 
     private int idx;
 
@@ -94,13 +94,14 @@ public class AbstractResourceUsage {
     @Override
     public String toString() {
       StringBuilder sb = new StringBuilder();
-      sb.append("{used=" + resArr.get(ResourceType.USED.idx) + ", ");
-      sb.append("pending=" + resArr.get(ResourceType.PENDING.idx) + ", ");
-      sb.append("am_used=" + resArr.get(ResourceType.AMUSED.idx) + ", ");
-      sb.append("reserved=" + resArr.get(ResourceType.RESERVED.idx) + ", ");
-      sb.append("min_eff=" + resArr.get(ResourceType.EFF_MIN_RESOURCE.idx) + ", ");
-      sb.append(
-          "max_eff=" + resArr.get(ResourceType.EFF_MAX_RESOURCE.idx) + "}");
+      sb.append("{used=" + resArr.get(ResourceType.USED.idx) + ", ")
+          .append("pending=" + resArr.get(ResourceType.PENDING.idx) + ", ")
+          .append("am_used=" + resArr.get(ResourceType.AMUSED.idx) + ", ")
+          .append("reserved=" + resArr.get(ResourceType.RESERVED.idx) + ", ")
+          .append(
+              "min_eff=" + resArr.get(ResourceType.EFF_MIN_RESOURCE.idx) + ", ")
+          .append(
+              "max_eff=" + resArr.get(ResourceType.EFF_MAX_RESOURCE.idx) + "}");
       return sb.toString();
     }
   }
@@ -134,7 +135,7 @@ public class AbstractResourceUsage {
       readLock.lock();
       Resource allOfType = Resources.createResource(0);
       for (Map.Entry<String, UsageByLabel> usageEntry : usages.entrySet()) {
-        //all usages types are initialized
+        // all usages types are initialized
         Resources.addTo(allOfType, usageEntry.getValue().resArr.get(type.idx));
       }
       return allOfType;

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerApplicationAttempt.java

@@ -1357,9 +1357,9 @@ public class SchedulerApplicationAttempt implements SchedulableEntity {
       return;
     }
     StringBuilder diagnosticMessageBldr = new StringBuilder();
-    diagnosticMessageBldr.append("[");
-    diagnosticMessageBldr.append(fdf.format(System.currentTimeMillis()));
-    diagnosticMessageBldr.append("] ");
+    diagnosticMessageBldr.append("[")
+        .append(fdf.format(System.currentTimeMillis()))
+        .append("] ");
     switch (state) {
     case INACTIVATED:
       diagnosticMessageBldr.append(state.diagnosticMessage);

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/activities/ActivityNode.java

@@ -94,9 +94,9 @@ public class ActivityNode {
 
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append(this.activityNodeName + " ");
-    sb.append(this.appPriority + " ");
-    sb.append(this.state + " ");
+    sb.append(this.activityNodeName + " ")
+        .append(this.appPriority + " ")
+        .append(this.state + " ");
     if (!this.diagnostic.equals("")) {
       sb.append(this.diagnostic + "\n");
     }

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java

@@ -249,8 +249,8 @@ public class LeafQueue extends AbstractCSQueue {
       StringBuilder labelStrBuilder = new StringBuilder();
       if (accessibleLabels != null) {
         for (String s : accessibleLabels) {
-          labelStrBuilder.append(s);
-          labelStrBuilder.append(",");
+          labelStrBuilder.append(s)
+              .append(",");
         }
       }
 

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/ParentQueue.java

@@ -136,8 +136,8 @@ public class ParentQueue extends AbstractCSQueue {
       StringBuilder labelStrBuilder = new StringBuilder();
       if (accessibleLabels != null) {
         for (String s : accessibleLabels) {
-          labelStrBuilder.append(s);
-          labelStrBuilder.append(",");
+          labelStrBuilder.append(s)
+              .append(",");
         }
       }
 

+ 9 - 9
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/QueueCapacities.java

@@ -69,15 +69,15 @@ public class QueueCapacities {
     @Override
     public String toString() {
       StringBuilder sb = new StringBuilder();
-      sb.append("{used=" + capacitiesArr[0] + "%, ");
-      sb.append("abs_used=" + capacitiesArr[1] + "%, ");
-      sb.append("max_cap=" + capacitiesArr[2] + "%, ");
-      sb.append("abs_max_cap=" + capacitiesArr[3] + "%, ");
-      sb.append("cap=" + capacitiesArr[4] + "%, ");
-      sb.append("abs_cap=" + capacitiesArr[5] + "%}");
-      sb.append("max_am_perc=" + capacitiesArr[6] + "%}");
-      sb.append("reserved_cap=" + capacitiesArr[7] + "%}");
-      sb.append("abs_reserved_cap=" + capacitiesArr[8] + "%}");
+      sb.append("{used=" + capacitiesArr[0] + "%, ")
+          .append("abs_used=" + capacitiesArr[1] + "%, ")
+          .append("max_cap=" + capacitiesArr[2] + "%, ")
+          .append("abs_max_cap=" + capacitiesArr[3] + "%, ")
+          .append("cap=" + capacitiesArr[4] + "%, ")
+          .append("abs_cap=" + capacitiesArr[5] + "%}")
+          .append("max_am_perc=" + capacitiesArr[6] + "%}")
+          .append("reserved_cap=" + capacitiesArr[7] + "%}")
+          .append("abs_reserved_cap=" + capacitiesArr[8] + "%}");
       return sb.toString();
     }
   }

+ 5 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/common/PendingAsk.java

@@ -53,11 +53,11 @@ public class PendingAsk {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("<per-allocation-resource=");
-    sb.append(getPerAllocationResource());
-    sb.append(",repeat=");
-    sb.append(getCount());
-    sb.append(">");
+    sb.append("<per-allocation-resource=")
+        .append(getPerAllocationResource())
+        .append(",repeat=")
+        .append(getCount())
+        .append(">");
     return sb.toString();
   }
 }

+ 60 - 61
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/common/fica/FiCaSchedulerApp.java

@@ -999,25 +999,24 @@ public class FiCaSchedulerApp extends SchedulerApplicationAttempt {
   protected void getPendingAppDiagnosticMessage(
       StringBuilder diagnosticMessage) {
     LeafQueue queue = getCSLeafQueue();
-    diagnosticMessage.append(" Details : AM Partition = ");
-    diagnosticMessage.append(appAMNodePartitionName.isEmpty()
-        ? NodeLabel.DEFAULT_NODE_LABEL_PARTITION : appAMNodePartitionName);
-    diagnosticMessage.append("; ");
-    diagnosticMessage.append("AM Resource Request = ");
-    diagnosticMessage.append(getAMResource(appAMNodePartitionName));
-    diagnosticMessage.append("; ");
-    diagnosticMessage.append("Queue Resource Limit for AM = ");
-    diagnosticMessage
-        .append(queue.getAMResourceLimitPerPartition(appAMNodePartitionName));
-    diagnosticMessage.append("; ");
-    diagnosticMessage.append("User AM Resource Limit of the queue = ");
-    diagnosticMessage.append(queue.getUserAMResourceLimitPerPartition(
-        appAMNodePartitionName, getUser()));
-    diagnosticMessage.append("; ");
-    diagnosticMessage.append("Queue AM Resource Usage = ");
-    diagnosticMessage.append(
-        queue.getQueueResourceUsage().getAMUsed(appAMNodePartitionName));
-    diagnosticMessage.append("; ");
+    diagnosticMessage.append(" Details : AM Partition = ")
+        .append(appAMNodePartitionName.isEmpty()
+        ? NodeLabel.DEFAULT_NODE_LABEL_PARTITION : appAMNodePartitionName)
+        .append("; ")
+        .append("AM Resource Request = ")
+        .append(getAMResource(appAMNodePartitionName))
+        .append("; ")
+        .append("Queue Resource Limit for AM = ")
+        .append(queue.getAMResourceLimitPerPartition(appAMNodePartitionName))
+        .append("; ")
+        .append("User AM Resource Limit of the queue = ")
+        .append(queue.getUserAMResourceLimitPerPartition(
+            appAMNodePartitionName, getUser()))
+        .append("; ")
+        .append("Queue AM Resource Usage = ")
+        .append(
+            queue.getQueueResourceUsage().getAMUsed(appAMNodePartitionName))
+        .append("; ");
   }
 
   protected void getActivedAppDiagnosticMessage(
@@ -1025,39 +1024,39 @@ public class FiCaSchedulerApp extends SchedulerApplicationAttempt {
     LeafQueue queue = getCSLeafQueue();
     QueueCapacities queueCapacities = queue.getQueueCapacities();
     QueueResourceQuotas queueResourceQuotas = queue.getQueueResourceQuotas();
-    diagnosticMessage.append(" Details : AM Partition = ");
-    diagnosticMessage.append(appAMNodePartitionName.isEmpty()
-        ? NodeLabel.DEFAULT_NODE_LABEL_PARTITION : appAMNodePartitionName);
-    diagnosticMessage.append(" ; ");
-    diagnosticMessage.append("Partition Resource = ");
-    diagnosticMessage.append(rmContext.getNodeLabelManager()
-        .getResourceByLabel(appAMNodePartitionName, Resources.none()));
-    diagnosticMessage.append(" ; ");
-    diagnosticMessage.append("Queue's Absolute capacity = ");
-    diagnosticMessage.append(
-        queueCapacities.getAbsoluteCapacity(appAMNodePartitionName) * 100);
-    diagnosticMessage.append(" % ; ");
-    diagnosticMessage.append("Queue's Absolute used capacity = ");
-    diagnosticMessage.append(
-        queueCapacities.getAbsoluteUsedCapacity(appAMNodePartitionName) * 100);
-    diagnosticMessage.append(" % ; ");
-    diagnosticMessage.append("Queue's Absolute max capacity = ");
-    diagnosticMessage.append(
-        queueCapacities.getAbsoluteMaximumCapacity(appAMNodePartitionName)
-            * 100);
-    diagnosticMessage.append(" % ; ");
-    diagnosticMessage.append("Queue's capacity (absolute resource) = ");
-    diagnosticMessage.append(
-        queueResourceQuotas.getEffectiveMinResource(appAMNodePartitionName));
-    diagnosticMessage.append(" ; ");
-    diagnosticMessage.append("Queue's used capacity (absolute resource) = ");
-    diagnosticMessage
-        .append(queue.getQueueResourceUsage().getUsed(appAMNodePartitionName));
-    diagnosticMessage.append(" ; ");
-    diagnosticMessage.append("Queue's max capacity (absolute resource) = ");
-    diagnosticMessage.append(
-        queueResourceQuotas.getEffectiveMaxResource(appAMNodePartitionName));
-    diagnosticMessage.append(" ; ");
+    diagnosticMessage.append(" Details : AM Partition = ")
+        .append(appAMNodePartitionName.isEmpty()
+            ? NodeLabel.DEFAULT_NODE_LABEL_PARTITION : appAMNodePartitionName)
+        .append(" ; ")
+        .append("Partition Resource = ")
+        .append(rmContext.getNodeLabelManager()
+            .getResourceByLabel(appAMNodePartitionName, Resources.none()))
+        .append(" ; ")
+        .append("Queue's Absolute capacity = ")
+        .append(
+            queueCapacities.getAbsoluteCapacity(appAMNodePartitionName) * 100)
+        .append(" % ; ")
+        .append("Queue's Absolute used capacity = ")
+        .append(
+            queueCapacities.getAbsoluteUsedCapacity(appAMNodePartitionName)
+                * 100)
+        .append(" % ; ")
+        .append("Queue's Absolute max capacity = ")
+        .append(
+            queueCapacities.getAbsoluteMaximumCapacity(appAMNodePartitionName)
+                * 100)
+        .append(" % ; ")
+        .append("Queue's capacity (absolute resource) = ")
+        .append(
+            queueResourceQuotas.getEffectiveMinResource(appAMNodePartitionName))
+        .append(" ; ")
+        .append("Queue's used capacity (absolute resource) = ")
+        .append(queue.getQueueResourceUsage().getUsed(appAMNodePartitionName))
+        .append(" ; ")
+        .append("Queue's max capacity (absolute resource) = ")
+        .append(
+            queueResourceQuotas.getEffectiveMaxResource(appAMNodePartitionName))
+        .append(" ; ");
   }
 
   /**
@@ -1082,15 +1081,15 @@ public class FiCaSchedulerApp extends SchedulerApplicationAttempt {
         appSkipNodeDiagnostics = null;
       }
       diagnosticMessageBldr.append(
-          CSAMContainerLaunchDiagnosticsConstants.LAST_NODE_PROCESSED_MSG);
-      diagnosticMessageBldr.append(node.getNodeID());
-      diagnosticMessageBldr.append(" ( Partition : ");
-      diagnosticMessageBldr.append(node.getLabels());
-      diagnosticMessageBldr.append(", Total resource : ");
-      diagnosticMessageBldr.append(node.getTotalResource());
-      diagnosticMessageBldr.append(", Available resource : ");
-      diagnosticMessageBldr.append(node.getUnallocatedResource());
-      diagnosticMessageBldr.append(" ).");
+          CSAMContainerLaunchDiagnosticsConstants.LAST_NODE_PROCESSED_MSG)
+          .append(node.getNodeID())
+          .append(" ( Partition : ")
+          .append(node.getLabels())
+          .append(", Total resource : ")
+          .append(node.getTotalResource())
+          .append(", Available resource : ")
+          .append(node.getUnallocatedResource())
+          .append(" ).");
       updateAMContainerDiagnostics(AMState.ACTIVATED, diagnosticMessageBldr.toString());
     }
   }

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/event/ContainerPreemptEvent.java

@@ -48,8 +48,8 @@ public class ContainerPreemptEvent extends SchedulerEvent {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder(super.toString());
-    sb.append(" ").append(getAppId());
-    sb.append(" ").append(getContainer().getContainerId());
+    sb.append(" ").append(getAppId())
+        .append(" ").append(getContainer().getContainerId());
     return sb.toString();
   }
 

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSAppAttempt.java

@@ -1399,9 +1399,9 @@ public class FSAppAttempt extends SchedulerApplicationAttempt
     }
 
     StringBuilder diagnosticMessageBldr = new StringBuilder();
-    diagnosticMessageBldr.append(" (Resource request: ");
-    diagnosticMessageBldr.append(resource);
-    diagnosticMessageBldr.append(reason);
+    diagnosticMessageBldr.append(" (Resource request: ")
+        .append(resource)
+        .append(reason);
     updateAMContainerDiagnostics(AMState.INACTIVATED,
         diagnosticMessageBldr.toString());
   }

+ 6 - 6
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java

@@ -233,12 +233,12 @@ class NodesPage extends RmView {
   private String nodesTableInit() {
     StringBuilder b = tableInit().append(", 'aaData': nodeTableData")
         .append(", bDeferRender: true").append(", bProcessing: true")
-        .append(", aoColumnDefs: [");
-    b.append("{'bSearchable': false, 'aTargets': [ 7 ]}");
-    b.append(", {'sType': 'title-numeric', 'bSearchable': false, "
-        + "'aTargets': [ 9, 10 ] }");
-    b.append(", {'sType': 'title-numeric', 'aTargets': [ 5 ]}");
-    b.append("]}");
+        .append(", aoColumnDefs: [")
+        .append("{'bSearchable': false, 'aTargets': [ 7 ]}")
+        .append(", {'sType': 'title-numeric', 'bSearchable': false, "
+            + "'aTargets': [ 9, 10 ] }")
+        .append(", {'sType': 'title-numeric', 'aTargets': [ 5 ]}")
+        .append("]}");
     return b.toString();
   }
 }

+ 6 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMErrorsAndWarningsPage.java

@@ -44,11 +44,12 @@ public class RMErrorsAndWarningsPage extends RmView {
 
   private String tablesInit() {
     StringBuilder b = tableInit().append(", aoColumnDefs: [");
-    b.append("{'sType': 'string', 'aTargets': [ 0 ]}");
-    b.append(", {'sType': 'string', 'bSearchable': true, 'aTargets': [ 1 ]}");
-    b.append(", {'sType': 'numeric', 'bSearchable': false, 'aTargets': [ 2 ]}");
-    b.append(", {'sType': 'date', 'aTargets': [ 3 ] }]");
-    b.append(", aaSorting: [[3, 'desc']]}");
+    b.append("{'sType': 'string', 'aTargets': [ 0 ]}")
+        .append(", {'sType': 'string', 'bSearchable': true, 'aTargets': [ 1 ]}")
+        .append(
+            ", {'sType': 'numeric', 'bSearchable': false, 'aTargets': [ 2 ]}")
+        .append(", {'sType': 'date', 'aTargets': [ 3 ] }]")
+        .append(", aaSorting: [[3, 'desc']]}");
     return b.toString();
   }
 }

+ 5 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/webapp/FederationPage.java

@@ -47,11 +47,11 @@ class FederationPage extends RouterView {
 
   private String rmsTableInit() {
     StringBuilder b = tableInit().append(", aoColumnDefs: [");
-    b.append("{'bSearchable': false, 'aTargets': [ 7 ]}");
-    b.append(", {'sType': 'title-numeric', 'bSearchable': false, "
-        + "'aTargets': [ 8, 9 ] }");
-    b.append(", {'sType': 'title-numeric', 'aTargets': [ 5 ]}");
-    b.append("]}");
+    b.append("{'bSearchable': false, 'aTargets': [ 7 ]}")
+        .append(", {'sType': 'title-numeric', 'bSearchable': false, "
+            + "'aTargets': [ 8, 9 ] }")
+        .append(", {'sType': 'title-numeric', 'aTargets': [ 5 ]}")
+        .append("]}");
     return b.toString();
   }
 }

+ 5 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/webapp/NodesPage.java

@@ -50,11 +50,11 @@ class NodesPage extends RouterView {
 
   private String nodesTableInit() {
     StringBuilder b = tableInit().append(", aoColumnDefs: [");
-    b.append("{'bSearchable': false, 'aTargets': [ 7 ]}");
-    b.append(", {'sType': 'title-numeric', 'bSearchable': false, "
-        + "'aTargets': [ 2, 3, 4, 5, 6 ] }");
-    b.append(", {'sType': 'title-numeric', 'aTargets': [ 5 ]}");
-    b.append("]}");
+    b.append("{'bSearchable': false, 'aTargets': [ 7 ]}")
+        .append(", {'sType': 'title-numeric', 'bSearchable': false, "
+            + "'aTargets': [ 2, 3, 4, 5, 6 ] }")
+        .append(", {'sType': 'title-numeric', 'aTargets': [ 5 ]}")
+        .append("]}");
     return b.toString();
   }
 }

+ 7 - 7
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java

@@ -375,8 +375,8 @@ public class TestContainerManagerSecurity extends KerberosSecurityTestcase {
     // authentication... It should complain saying container was recently
     // stopped.
     sb = new StringBuilder("Container ");
-    sb.append(validContainerId);
-    sb.append(" was recently stopped on node manager");
+    sb.append(validContainerId)
+        .append(" was recently stopped on node manager");
     Assert.assertTrue(testGetContainer(rpc, validAppAttemptId, validNode,
         validContainerId, validNMToken, true).contains(sb.toString()));
 
@@ -385,9 +385,9 @@ public class TestContainerManagerSecurity extends KerberosSecurityTestcase {
     
     // This should fail as container is removed from recently tracked finished
     // containers.
-    sb = new StringBuilder("Container ");
-    sb.append(validContainerId.toString());
-    sb.append(" is not handled by this NodeManager");
+    sb = new StringBuilder("Container ")
+        .append(validContainerId.toString())
+        .append(" is not handled by this NodeManager");
     Assert.assertTrue(testGetContainer(rpc, validAppAttemptId, validNode,
         validContainerId, validNMToken, false).contains(sb.toString()));
 
@@ -708,8 +708,8 @@ public class TestContainerManagerSecurity extends KerberosSecurityTestcase {
             nodeId, user, r, Priority.newInstance(0), 0);
     
     StringBuilder sb = new StringBuilder("Given Container ");
-    sb.append(cId2);
-    sb.append(" seems to have an illegally generated token.");
+    sb.append(cId2)
+        .append(" seems to have an illegally generated token.");
     Assert.assertTrue(testStartContainer(rpc, appAttemptId, nodeId,
         containerToken2, nmToken, true).contains(sb.toString()));
   }
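
One more editorial note on the test change above: the chained calls are assigned directly from the constructor expression, which is safe because each append() returns the same StringBuilder instance that new StringBuilder("Container ") created. A self-contained sketch with a placeholder container id:

    final class ConstructorChainSketch {
      static String notHandledMessage(String containerId) {  // containerId is a made-up argument
        // append() returns its receiver, so sb still refers to the single
        // builder allocated by the constructor call.
        StringBuilder sb = new StringBuilder("Container ")
            .append(containerId)
            .append(" is not handled by this NodeManager");
        return sb.toString();
      }
    }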

+ 23 - 22
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineSchemaCreator.java

@@ -237,28 +237,29 @@ public final class HBaseTimelineSchemaCreator implements SchemaCreator {
 
   private static void printUsage() {
     StringBuilder usage = new StringBuilder("Command Usage: \n");
-    usage.append("TimelineSchemaCreator [-help] Display help info" +
-        " for all commands. Or\n");
-    usage.append("TimelineSchemaCreator -create [OPTIONAL_OPTIONS]" +
-        " Create hbase tables.\n\n");
-    usage.append("The Optional options for creating tables include: \n");
-    usage.append("[-entityTableName <Entity Table Name>] " +
-        "The name of the Entity table\n");
-    usage.append("[-entityMetricsTTL <Entity Table Metrics TTL>]" +
-        " TTL for metrics in the Entity table\n");
-    usage.append("[-appToflowTableName <AppToflow Table Name>]" +
-        " The name of the AppToFlow table\n");
-    usage.append("[-applicationTableName <Application Table Name>]" +
-        " The name of the Application table\n");
-    usage.append("[-applicationMetricsTTL <Application Table Metrics TTL>]" +
-        " TTL for metrics in the Application table\n");
-    usage.append("[-subApplicationTableName <SubApplication Table Name>]" +
-        " The name of the SubApplication table\n");
-    usage.append("[-subApplicationMetricsTTL " +
-        " <SubApplication Table Metrics TTL>]" +
-        " TTL for metrics in the SubApplication table\n");
-    usage.append("[-skipExistingTable] Whether to skip existing" +
-        " hbase tables\n");
+    usage
+        .append("TimelineSchemaCreator [-help] Display help info"
+            + " for all commands. Or\n")
+        .append("TimelineSchemaCreator -create [OPTIONAL_OPTIONS]" +
+            " Create hbase tables.\n\n")
+        .append("The Optional options for creating tables include: \n")
+        .append("[-entityTableName <Entity Table Name>] " +
+            "The name of the Entity table\n")
+        .append("[-entityMetricsTTL <Entity Table Metrics TTL>]" +
+            " TTL for metrics in the Entity table\n")
+        .append("[-appToflowTableName <AppToflow Table Name>]" +
+            " The name of the AppToFlow table\n")
+        .append("[-applicationTableName <Application Table Name>]" +
+            " The name of the Application table\n")
+        .append("[-applicationMetricsTTL <Application Table Metrics TTL>]" +
+            " TTL for metrics in the Application table\n")
+        .append("[-subApplicationTableName <SubApplication Table Name>]" +
+            " The name of the SubApplication table\n")
+        .append("[-subApplicationMetricsTTL "
+            + " <SubApplication Table Metrics TTL>]" +
+            " TTL for metrics in the SubApplication table\n")
+        .append("[-skipExistingTable] Whether to skip existing" +
+            " hbase tables\n");
     System.out.println(usage.toString());
   }
 

+ 5 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineSchemaUtils.java

@@ -146,11 +146,11 @@ public final class HBaseTimelineSchemaUtils {
    */
   public static String convertApplicationIdToString(ApplicationId appId) {
     StringBuilder sb = new StringBuilder(64);
-    sb.append(ApplicationId.appIdStrPrefix);
-    sb.append("_");
-    sb.append(appId.getClusterTimestamp());
-    sb.append('_');
-    sb.append(APP_ID_FORMAT.get().format(appId.getId()));
+    sb.append(ApplicationId.appIdStrPrefix)
+        .append("_")
+        .append(appId.getClusterTimestamp())
+        .append('_')
+        .append(APP_ID_FORMAT.get().format(appId.getId()));
     return sb.toString();
   }
 }

+ 2 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Separator.java

@@ -330,8 +330,8 @@ public enum Separator {
     StringBuilder sb = new StringBuilder(encode(items[0].toString()));
     // Start at 1, we've already grabbed the first value at index 0
     for (int i = 1; i < items.length; i++) {
-      sb.append(this.value);
-      sb.append(encode(items[i].toString()));
+      sb.append(this.value)
+          .append(encode(items[i].toString()));
     }
 
     return sb.toString();

+ 6 - 6
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java

@@ -106,12 +106,12 @@ public class FlowRunRowKey {
   @Override
   public String toString() {
     StringBuilder flowKeyStr = new StringBuilder();
-    flowKeyStr.append("{clusterId=" + clusterId);
-    flowKeyStr.append(" userId=" + userId);
-    flowKeyStr.append(" flowName=" + flowName);
-    flowKeyStr.append(" flowRunId=");
-    flowKeyStr.append(flowRunId);
-    flowKeyStr.append("}");
+    flowKeyStr.append("{clusterId=" + clusterId)
+        .append(" userId=" + userId)
+        .append(" flowName=" + flowName)
+        .append(" flowRunId=")
+        .append(flowRunId)
+        .append("}");
     return flowKeyStr.toString();
   }