
YARN-7477. Moving logging APIs over to slf4j in hadoop-yarn-common. Contributed by Prabhu Joseph.

Akira Ajisaka, 6 years ago
parent
commit
bd8d299ded
68 changed files with 383 additions and 329 deletions
  1. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/FileSystemBasedConfigurationProvider.java
  2. + 10 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/YarnUncaughtExceptionHandler.java
  3. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ResourcePBImpl.java
  4. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/AHSProxy.java
  5. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/ClientRMProxy.java
  6. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/ConfiguredRMFailoverProxyProvider.java
  7. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/RMProxy.java
  8. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/RequestHedgingRMFailoverProxyProvider.java
  9. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/DirectTimelineWriter.java
  10. + 9 - 9  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
  11. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
  12. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineConnector.java
  13. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineReaderClientImpl.java
  14. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
  15. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineWriter.java
  16. + 9 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java
  17. + 9 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/EventDispatcher.java
  18. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java
  19. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java
  20. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnProtoRPC.java
  21. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java
  22. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java
  23. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/LogAggregationFileControllerFactory.java
  24. + 3 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/tfile/LogAggregationTFileController.java
  25. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java
  26. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java
  27. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NonAppendableFSNodeLabelStore.java
  28. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/store/AbstractFSNodeStore.java
  29. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AMRMTokenIdentifier.java
  30. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AMRMTokenSelector.java
  31. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AdminACLsManager.java
  32. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ContainerTokenIdentifier.java
  33. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ContainerTokenSelector.java
  34. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/NMTokenIdentifier.java
  35. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/NMTokenSelector.java
  36. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/YarnAuthorizationProvider.java
  37. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientToAMTokenSelector.java
  38. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/RMDelegationTokenSelector.java
  39. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/TimelineDelegationTokenSelector.java
  40. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/security/ApplicationACLsManager.java
  41. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AbstractLivelinessMonitor.java
  42. + 62 - 61  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AdHocLogDumper.java
  43. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java
  44. + 9 - 9  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
  45. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java
  46. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorProcessTree.java
  47. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Times.java
  48. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/WindowsBasedProcessTree.java
  49. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/YarnVersionInfo.java
  50. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/resource/DefaultResourceCalculator.java
  51. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/resource/DominantResourceCalculator.java
  52. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/resource/Resources.java
  53. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/GenericExceptionHandler.java
  54. + 5 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java
  55. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerResourceIncreaseRPC.java
  56. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/BasePBImplRecordsTest.java
  57. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java
  58. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java
  59. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientForATS1_5.java
  60. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java
  61. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/event/InlineDispatcher.java
  62. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
  63. + 17 - 14  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestAdHocLogDumper.java
  64. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java
  65. + 19 - 13  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestLog4jWarningErrorMetricsAppender.java
  66. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java
  67. + 4 - 3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestRackResolver.java
  68. + 4 - 4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/FileSystemBasedConfigurationProvider.java

@@ -21,8 +21,8 @@ package org.apache.hadoop.yarn;
 import java.io.IOException;
 import java.io.InputStream;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
@@ -37,8 +37,8 @@ import org.apache.hadoop.yarn.exceptions.YarnException;
 public class FileSystemBasedConfigurationProvider
     extends ConfigurationProvider {
 
-  private static final Log LOG = LogFactory
-      .getLog(FileSystemBasedConfigurationProvider.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(FileSystemBasedConfigurationProvider.class);
   private FileSystem fs;
   private Path configDir;
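
Most of the 68 files follow the same mechanical pattern as the hunk above: the commons-logging imports are replaced with slf4j, and the Log field obtained from LogFactory becomes a Logger obtained from LoggerFactory. A minimal before/after sketch of that pattern, using a hypothetical MyService class rather than any file in this patch:

// Before (commons-logging, removed by this patch):
//   import org.apache.commons.logging.Log;
//   import org.apache.commons.logging.LogFactory;
//   private static final Log LOG = LogFactory.getLog(MyService.class);

// After (slf4j, as applied throughout this patch):
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MyService {
  private static final Logger LOG =
      LoggerFactory.getLogger(MyService.class);

  void start() {
    // slf4j also allows parameterized messages instead of string concatenation.
    LOG.info("Starting {}", MyService.class.getSimpleName());
  }
}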
 

+ 10 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/YarnUncaughtExceptionHandler.java

@@ -20,8 +20,10 @@ package org.apache.hadoop.yarn;
 
 import java.lang.Thread.UncaughtExceptionHandler;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.Marker;
+import org.slf4j.MarkerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.util.ExitUtil;
@@ -39,7 +41,10 @@ import org.apache.hadoop.util.ShutdownHookManager;
 @Public
 @Evolving
 public class YarnUncaughtExceptionHandler implements UncaughtExceptionHandler {
-  private static final Log LOG = LogFactory.getLog(YarnUncaughtExceptionHandler.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(YarnUncaughtExceptionHandler.class);
+  private static final Marker FATAL =
+      MarkerFactory.getMarker("FATAL");
   
   @Override
   public void uncaughtException(Thread t, Throwable e) {
@@ -48,7 +53,8 @@ public class YarnUncaughtExceptionHandler implements UncaughtExceptionHandler {
       		"down, so ignoring this", e);
     } else if(e instanceof Error) {
       try {
-        LOG.fatal("Thread " + t + " threw an Error.  Shutting down now...", e);
+        LOG.error(FATAL,
+            "Thread " + t + " threw an Error.  Shutting down now...", e);
       } catch (Throwable err) {
         //We don't want to not exit because of an issue with logging
       }
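
slf4j has no FATAL level, so the old LOG.fatal(...) call above becomes LOG.error(...) tagged with a "FATAL" marker. A short sketch of that marker pattern, with a hypothetical FatalMarkerExample class standing in for the real handler:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

public class FatalMarkerExample {
  private static final Logger LOG =
      LoggerFactory.getLogger(FatalMarkerExample.class);
  private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

  void shutDownOnError(Thread t, Throwable e) {
    // Same intent as the removed commons-logging LOG.fatal(msg, e);
    // logging backends can filter or route messages on the FATAL marker.
    LOG.error(FATAL, "Thread " + t + " threw an Error.  Shutting down now...", e);
  }
}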

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ResourcePBImpl.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.yarn.api.records.impl.pb;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.yarn.api.protocolrecords.ResourceTypes;
@@ -41,7 +41,8 @@ import java.util.Map;
 @Unstable
 public class ResourcePBImpl extends Resource {
 
-  private static final Log LOG = LogFactory.getLog(ResourcePBImpl.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ResourcePBImpl.class);
 
   ResourceProto proto = ResourceProto.getDefaultInstance();
   ResourceProto.Builder builder = null;

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/AHSProxy.java

@@ -22,8 +22,8 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.security.PrivilegedAction;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -35,7 +35,8 @@ import org.apache.hadoop.yarn.ipc.YarnRPC;
 @SuppressWarnings("unchecked")
 public class AHSProxy<T> {
 
-  private static final Log LOG = LogFactory.getLog(AHSProxy.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(AHSProxy.class);
 
   public static <T> T createAHSProxy(final Configuration conf,
       final Class<T> protocol, InetSocketAddress ahsAddress) throws IOException {

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/ClientRMProxy.java

@@ -22,8 +22,8 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -47,7 +47,8 @@ import com.google.common.base.Preconditions;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class ClientRMProxy<T> extends RMProxy<T>  {
-  private static final Log LOG = LogFactory.getLog(ClientRMProxy.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ClientRMProxy.class);
 
   private interface ClientRMProtocols extends ApplicationClientProtocol,
       ApplicationMasterProtocol, ResourceManagerAdministrationProtocol {

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/ConfiguredRMFailoverProxyProvider.java

@@ -25,8 +25,8 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -39,8 +39,8 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 @InterfaceStability.Unstable
 public class ConfiguredRMFailoverProxyProvider<T>
     implements RMFailoverProxyProvider<T> {
-  private static final Log LOG =
-      LogFactory.getLog(ConfiguredRMFailoverProxyProvider.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ConfiguredRMFailoverProxyProvider.class);
 
   private int currentProxyIndex = 0;
   Map<String, T> proxies = new HashMap<String, T>();

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/RMProxy.java

@@ -30,8 +30,8 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -56,7 +56,8 @@ import com.google.common.annotations.VisibleForTesting;
 @SuppressWarnings("unchecked")
 public class RMProxy<T> {
 
-  private static final Log LOG = LogFactory.getLog(RMProxy.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(RMProxy.class);
   private UserGroupInformation user;
 
   protected RMProxy() {

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/RequestHedgingRMFailoverProxyProvider.java

@@ -34,8 +34,8 @@ import java.util.concurrent.ExecutorCompletionService;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Future;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryProxy;
@@ -55,8 +55,8 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 public class RequestHedgingRMFailoverProxyProvider<T>
     extends ConfiguredRMFailoverProxyProvider<T> {
 
-  private static final Log LOG =
-      LogFactory.getLog(RequestHedgingRMFailoverProxyProvider.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(RequestHedgingRMFailoverProxyProvider.class);
 
   private volatile String successfulProxy = null;
   private ProxyInfo<T> wrappedProxy = null;

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/DirectTimelineWriter.java

@@ -21,8 +21,8 @@ package org.apache.hadoop.yarn.client.api.impl;
 import java.io.IOException;
 import java.net.URI;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -42,8 +42,8 @@ import com.sun.jersey.api.client.Client;
 @Unstable
 public class DirectTimelineWriter extends TimelineWriter{
 
-  private static final Log LOG = LogFactory
-      .getLog(DirectTimelineWriter.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(DirectTimelineWriter.class);
 
   public DirectTimelineWriter(UserGroupInformation authUgi,
       Client client, URI resURI) {

+ 9 - 9
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java

@@ -38,8 +38,8 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
@@ -78,8 +78,8 @@ import com.sun.jersey.api.client.Client;
 @Unstable
 public class FileSystemTimelineWriter extends TimelineWriter{
 
-  private static final Log LOG = LogFactory
-      .getLog(FileSystemTimelineWriter.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(FileSystemTimelineWriter.class);
 
   // App log directory must be readable by group so server can access logs
   // and writable by group so it can be deleted by server
@@ -267,7 +267,7 @@ public class FileSystemTimelineWriter extends TimelineWriter{
       LOG.debug("Closing cache");
       logFDsCache.flush();
     }
-    IOUtils.cleanup(LOG, logFDsCache, fs);
+    IOUtils.cleanupWithLogger(LOG, logFDsCache, fs);
   }
 
   @Override
@@ -355,8 +355,8 @@ public class FileSystemTimelineWriter extends TimelineWriter{
 
     public void close() {
       if (stream != null) {
-        IOUtils.cleanup(LOG, jsonGenerator);
-        IOUtils.cleanup(LOG, stream);
+        IOUtils.cleanupWithLogger(LOG, jsonGenerator);
+        IOUtils.cleanupWithLogger(LOG, stream);
         stream = null;
         jsonGenerator = null;
       }
@@ -559,7 +559,7 @@ public class FileSystemTimelineWriter extends TimelineWriter{
           flush();
         } catch (Exception e) {
           if (LOG.isDebugEnabled()) {
-            LOG.debug(e);
+            LOG.debug(e.toString());
           }
         }
       }
@@ -636,7 +636,7 @@ public class FileSystemTimelineWriter extends TimelineWriter{
         try {
           cleanInActiveFDs();
         } catch (Exception e) {
-          LOG.warn(e);
+          LOG.warn(e.toString());
         }
       }
     }
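
Two smaller adjustments recur in this file: IOUtils.cleanup(Log, ...) becomes the slf4j-aware IOUtils.cleanupWithLogger(Logger, ...), and calls such as LOG.debug(e), which relied on commons-logging accepting a bare Object, now pass a String via e.toString(). A minimal sketch of both call sites, assuming a hypothetical CleanupExample class:

import java.io.Closeable;
import org.apache.hadoop.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CleanupExample {
  private static final Logger LOG =
      LoggerFactory.getLogger(CleanupExample.class);

  void closeQuietly(Closeable out, Closeable in) {
    // Same behaviour as the old IOUtils.cleanup(LOG, ...), but the overload
    // takes an org.slf4j.Logger instead of a commons-logging Log.
    IOUtils.cleanupWithLogger(LOG, out, in);
  }

  void logFlushFailure(Exception e) {
    // slf4j methods take a String message, so the bare LOG.debug(e) becomes:
    LOG.debug(e.toString());
  }
}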

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java

@@ -28,8 +28,8 @@ import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
@@ -58,7 +58,8 @@ import com.sun.jersey.api.client.Client;
 @Evolving
 public class TimelineClientImpl extends TimelineClient {
 
-  private static final Log LOG = LogFactory.getLog(TimelineClientImpl.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TimelineClientImpl.class);
   private static final ObjectMapper MAPPER = new ObjectMapper();
   private static final String RESOURCE_URI_STR_V1 = "/ws/v1/timeline/";
 

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineConnector.java

@@ -34,8 +34,8 @@ import javax.net.ssl.HostnameVerifier;
 import javax.net.ssl.HttpsURLConnection;
 import javax.net.ssl.SSLSocketFactory;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -73,7 +73,8 @@ import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
 public class TimelineConnector extends AbstractService {
 
   private static final Joiner JOINER = Joiner.on("");
-  private static final Log LOG = LogFactory.getLog(TimelineConnector.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TimelineConnector.class);
   public final static int DEFAULT_SOCKET_TIMEOUT = 1 * 60 * 1000; // 1 minute
 
   private SSLFactory sslFactory;

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineReaderClientImpl.java

@@ -19,8 +19,8 @@ package org.apache.hadoop.yarn.client.api.impl;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.sun.jersey.core.util.MultivaluedMapImpl;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -53,8 +53,8 @@ import static org.apache.hadoop.yarn.util.StringHelper.PATH_JOINER;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class TimelineReaderClientImpl extends TimelineReaderClient {
-  private static final Log LOG =
-      LogFactory.getLog(TimelineReaderClientImpl.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TimelineReaderClientImpl.class);
 
   private static final String RESOURCE_URI_STR_V2 = "/ws/v2/timeline/";
 

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java

@@ -36,8 +36,8 @@ import java.util.concurrent.TimeUnit;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.MultivaluedMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
@@ -64,7 +64,8 @@ import com.sun.jersey.core.util.MultivaluedMapImpl;
  *
  */
 public class TimelineV2ClientImpl extends TimelineV2Client {
-  private static final Log LOG = LogFactory.getLog(TimelineV2ClientImpl.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TimelineV2ClientImpl.class);
 
   private static final String RESOURCE_URI_STR_V2 = "/ws/v2/timeline/";
 

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineWriter.java

@@ -26,8 +26,8 @@ import java.net.URI;
 import java.security.PrivilegedExceptionAction;
 import javax.ws.rs.core.MediaType;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -52,8 +52,8 @@ import com.sun.jersey.api.client.WebResource;
 @Unstable
 public abstract class TimelineWriter implements Flushable {
 
-  private static final Log LOG = LogFactory
-      .getLog(TimelineWriter.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(TimelineWriter.class);
 
   private UserGroupInformation authUgi;
   private Client client;

+ 9 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java

@@ -25,8 +25,10 @@ import java.util.Map;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.LinkedBlockingQueue;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.Marker;
+import org.slf4j.MarkerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.service.AbstractService;
@@ -46,7 +48,10 @@ import com.google.common.annotations.VisibleForTesting;
 @Evolving
 public class AsyncDispatcher extends AbstractService implements Dispatcher {
 
-  private static final Log LOG = LogFactory.getLog(AsyncDispatcher.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(AsyncDispatcher.class);
+  private static final Marker FATAL =
+      MarkerFactory.getMarker("FATAL");
 
   private final BlockingQueue<Event> eventQueue;
   private volatile int lastEventQueueSizeLogged = 0;
@@ -200,7 +205,7 @@ public class AsyncDispatcher extends AbstractService implements Dispatcher {
       }
     } catch (Throwable t) {
       //TODO Maybe log the state of the queue
-      LOG.fatal("Error in dispatcher thread", t);
+      LOG.error(FATAL, "Error in dispatcher thread", t);
       // If serviceStop is called, we should exit this thread gracefully.
       if (exitOnDispatchException
           && (ShutdownHookManager.get().isShutdownInProgress()) == false

+ 9 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/EventDispatcher.java

@@ -19,8 +19,10 @@
 package org.apache.hadoop.yarn.event;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.Marker;
+import org.slf4j.MarkerFactory;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.util.ShutdownHookManager;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
@@ -46,7 +48,10 @@ public class EventDispatcher<T extends Event> extends
   private volatile boolean stopped = false;
   private boolean shouldExitOnError = true;
 
-  private static final Log LOG = LogFactory.getLog(EventDispatcher.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(EventDispatcher.class);
+  private static final Marker FATAL =
+      MarkerFactory.getMarker("FATAL");
 
   private final class EventProcessor implements Runnable {
     @Override
@@ -72,7 +77,7 @@ public class EventDispatcher<T extends Event> extends
             LOG.warn("Exception during shutdown: ", t);
             break;
           }
-          LOG.fatal("Error in handling event type " + event.getType()
+          LOG.error(FATAL, "Error in handling event type " + event.getType()
               + " to the Event Dispatcher", t);
           if (shouldExitOnError
               && !ShutdownHookManager.get().isShutdownInProgress()) {

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java

@@ -27,8 +27,8 @@ import java.net.InetSocketAddress;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
@@ -38,8 +38,8 @@ import org.apache.hadoop.yarn.factories.RpcClientFactory;
 @Private
 public class RpcClientFactoryPBImpl implements RpcClientFactory {
 
-  private static final Log LOG = LogFactory
-      .getLog(RpcClientFactoryPBImpl.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(RpcClientFactoryPBImpl.class);
 
   private static final String PB_IMPL_PACKAGE_SUFFIX = "impl.pb.client";
   private static final String PB_IMPL_CLASS_SUFFIX = "PBClientImpl";

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java

@@ -26,8 +26,8 @@ import java.net.InetSocketAddress;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
@@ -43,7 +43,8 @@ import com.google.protobuf.BlockingService;
 @Private
 public class RpcServerFactoryPBImpl implements RpcServerFactory {
 
-  private static final Log LOG = LogFactory.getLog(RpcServerFactoryPBImpl.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(RpcServerFactoryPBImpl.class);
   private static final String PROTO_GEN_PACKAGE_NAME = "org.apache.hadoop.yarn.proto";
   private static final String PROTO_GEN_CLASS_SUFFIX = "Service";
   private static final String PB_IMPL_PACKAGE_SUFFIX = "impl.pb.service";

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnProtoRPC.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.yarn.ipc;
 
 import java.net.InetSocketAddress;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.Server;
@@ -39,7 +39,8 @@ import org.apache.hadoop.yarn.factory.providers.RpcFactoryProvider;
 @InterfaceAudience.LimitedPrivate({ "MapReduce", "YARN" })
 public class HadoopYarnProtoRPC extends YarnRPC {
 
-  private static final Log LOG = LogFactory.getLog(HadoopYarnProtoRPC.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HadoopYarnProtoRPC.class);
 
   @Override
   public Object getProxy(Class protocol, InetSocketAddress addr,

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.yarn.ipc;
 
 import java.net.InetSocketAddress;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.Server;
@@ -35,7 +35,8 @@ import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
  */
 @InterfaceAudience.LimitedPrivate({ "MapReduce", "YARN" })
 public abstract class YarnRPC {
-  private static final Log LOG = LogFactory.getLog(YarnRPC.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(YarnRPC.class);
   
   public abstract Object getProxy(Class protocol, InetSocketAddress addr,
       Configuration conf);

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java

@@ -22,8 +22,8 @@ import java.io.IOException;
 import java.util.Timer;
 import java.util.TimerTask;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -50,7 +50,8 @@ import com.google.common.annotations.VisibleForTesting;
  */
 @InterfaceAudience.LimitedPrivate({"yarn", "mapreduce"})
 public class AggregatedLogDeletionService extends AbstractService {
-  private static final Log LOG = LogFactory.getLog(AggregatedLogDeletionService.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(AggregatedLogDeletionService.class);
   
   private Timer timer = null;
   private long checkIntervalMsecs;

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/LogAggregationFileControllerFactory.java

@@ -29,8 +29,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.regex.Pattern;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
@@ -51,7 +51,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 @Unstable
 public class LogAggregationFileControllerFactory {
 
-  private static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
       LogAggregationFileControllerFactory.class);
   private final Pattern p = Pattern.compile(
       "^[A-Za-z_]+[A-Za-z0-9_]*$");

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/tfile/LogAggregationTFileController.java

@@ -27,8 +27,8 @@ import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.commons.math3.util.Pair;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
@@ -67,7 +67,7 @@ import org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block;
 public class LogAggregationTFileController
     extends LogAggregationFileController {
 
-  private static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
       LogAggregationTFileController.class);
 
   private LogWriter writer;

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java

@@ -37,8 +37,8 @@ import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.service.AbstractService;
@@ -63,7 +63,8 @@ import com.google.common.collect.ImmutableSet;
 
 @Private
 public class CommonNodeLabelsManager extends AbstractService {
-  protected static final Log LOG = LogFactory.getLog(CommonNodeLabelsManager.class);
+  protected static final Logger LOG =
+      LoggerFactory.getLogger(CommonNodeLabelsManager.class);
   public static final Set<String> EMPTY_STRING_SET = Collections
       .unmodifiableSet(new HashSet<String>(0));
   public static final Set<NodeLabel> EMPTY_NODELABEL_SET = Collections

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.yarn.nodelabels;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -46,8 +46,8 @@ import java.util.Set;
 public class FileSystemNodeLabelsStore
     extends AbstractFSNodeStore<CommonNodeLabelsManager>
     implements NodeLabelsStore {
-  protected static final Log LOG =
-      LogFactory.getLog(FileSystemNodeLabelsStore.class);
+  protected static final Logger LOG =
+      LoggerFactory.getLogger(FileSystemNodeLabelsStore.class);
 
   protected static final String DEFAULT_DIR_NAME = "node-labels";
   protected static final String MIRROR_FILENAME = "nodelabel.mirror";

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NonAppendableFSNodeLabelStore.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.yarn.nodelabels;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.api.records.NodeId;
@@ -39,8 +39,8 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
  * Store implementation for Non Appendable File Store.
  */
 public class NonAppendableFSNodeLabelStore extends FileSystemNodeLabelsStore {
-  protected static final Log
-      LOG = LogFactory.getLog(NonAppendableFSNodeLabelStore.class);
+  protected static final Logger LOG =
+      LoggerFactory.getLogger(NonAppendableFSNodeLabelStore.class);
 
   @Override
   public void close() throws IOException {

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/store/AbstractFSNodeStore.java

@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.yarn.nodelabels.store;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -40,7 +40,8 @@ import java.io.IOException;
  */
 public abstract class AbstractFSNodeStore<M> {
 
-  protected static final Log LOG = LogFactory.getLog(AbstractFSNodeStore.class);
+  protected static final Logger LOG =
+      LoggerFactory.getLogger(AbstractFSNodeStore.class);
 
   private StoreType storeType;
   private FSDataOutputStream editlogOs;

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AMRMTokenIdentifier.java

@@ -25,8 +25,8 @@ import java.io.DataOutput;
 import java.io.IOException;
 
 import com.google.protobuf.InvalidProtocolBufferException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
@@ -51,7 +51,8 @@ import com.google.protobuf.TextFormat;
 @Evolving
 public class AMRMTokenIdentifier extends TokenIdentifier {
 
-  private static final Log LOG = LogFactory.getLog(AMRMTokenIdentifier.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(AMRMTokenIdentifier.class);
 
   public static final Text KIND_NAME = new Text("YARN_AM_RM_TOKEN");
   private AMRMTokenIdentifierProto proto;

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AMRMTokenSelector.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.yarn.security;
 
 import java.util.Collection;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.io.Text;
@@ -34,8 +34,8 @@ import org.apache.hadoop.security.token.TokenSelector;
 public class AMRMTokenSelector implements
     TokenSelector<AMRMTokenIdentifier> {
 
-  private static final Log LOG = LogFactory
-      .getLog(AMRMTokenSelector.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(AMRMTokenSelector.class);
 
   @SuppressWarnings("unchecked")
   public Token<AMRMTokenIdentifier> selectToken(Text service,

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AdminACLsManager.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.yarn.security;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
@@ -36,7 +36,8 @@ public class AdminACLsManager {
   /**
    * Log object for this class
    */
-  static Log LOG = LogFactory.getLog(AdminACLsManager.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(AdminACLsManager.class);
 
   /**
    * The current user at the time of object creation

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ContainerTokenIdentifier.java

@@ -28,8 +28,8 @@ import java.util.HashSet;
 import java.util.Set;
 
 import com.google.protobuf.InvalidProtocolBufferException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
@@ -68,7 +68,8 @@ import com.google.protobuf.TextFormat;
 @Evolving
 public class ContainerTokenIdentifier extends TokenIdentifier {
 
-  private static Log LOG = LogFactory.getLog(ContainerTokenIdentifier.class);
+  private final static Logger LOG =
+      LoggerFactory.getLogger(ContainerTokenIdentifier.class);
 
   public static final Text KIND = new Text("ContainerToken");
 

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ContainerTokenSelector.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.yarn.security;
 
 import java.util.Collection;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Stable;
 import org.apache.hadoop.io.Text;
@@ -34,8 +34,8 @@ import org.apache.hadoop.security.token.TokenSelector;
 public class ContainerTokenSelector implements
     TokenSelector<ContainerTokenIdentifier> {
 
-  private static final Log LOG = LogFactory
-      .getLog(ContainerTokenSelector.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(ContainerTokenSelector.class);
 
   @SuppressWarnings("unchecked")
   @Override

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/NMTokenIdentifier.java

@@ -25,8 +25,8 @@ import java.io.DataOutput;
 import java.io.IOException;
 
 import com.google.protobuf.InvalidProtocolBufferException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.io.IOUtils;
@@ -46,7 +46,8 @@ import com.google.protobuf.TextFormat;
 @Evolving
 public class NMTokenIdentifier extends TokenIdentifier {
 
-  private static Log LOG = LogFactory.getLog(NMTokenIdentifier.class);
+  private final static Logger LOG =
+      LoggerFactory.getLogger(NMTokenIdentifier.class);
 
   public static final Text KIND = new Text("NMToken");
   

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/NMTokenSelector.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.yarn.security;
 
 import java.util.Collection;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -30,8 +30,8 @@ import org.apache.hadoop.security.token.TokenSelector;
 public class NMTokenSelector implements
     TokenSelector<NMTokenIdentifier> {
 
-  private static final Log LOG = LogFactory
-      .getLog(NMTokenSelector.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(NMTokenSelector.class);
 
   @SuppressWarnings("unchecked")
   @Override

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/YarnAuthorizationProvider.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.yarn.security;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
@@ -41,7 +41,8 @@ import java.util.List;
 @Unstable
 public abstract class YarnAuthorizationProvider {
 
-  private static final Log LOG = LogFactory.getLog(YarnAuthorizationProvider.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(YarnAuthorizationProvider.class);
 
   private static YarnAuthorizationProvider authorizer = null;
 

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientToAMTokenSelector.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.yarn.security.client;
 
 import java.util.Collection;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -30,8 +30,8 @@ import org.apache.hadoop.security.token.TokenSelector;
 public class ClientToAMTokenSelector implements
     TokenSelector<ClientToAMTokenIdentifier> {
 
-  private static final Log LOG = LogFactory
-      .getLog(ClientToAMTokenSelector.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(ClientToAMTokenSelector.class);
 
   @SuppressWarnings("unchecked")
   public Token<ClientToAMTokenIdentifier> selectToken(Text service,

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/RMDelegationTokenSelector.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.yarn.security.client;
 
 import java.util.Collection;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Stable;
 import org.apache.hadoop.io.Text;
@@ -34,8 +34,8 @@ import org.apache.hadoop.security.token.TokenSelector;
 public class RMDelegationTokenSelector implements
     TokenSelector<RMDelegationTokenIdentifier> {
 
-  private static final Log LOG = LogFactory
-      .getLog(RMDelegationTokenSelector.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(RMDelegationTokenSelector.class);
 
   private boolean checkService(Text service,
       Token<? extends TokenIdentifier> token) {

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/TimelineDelegationTokenSelector.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.yarn.security.client;
 
 import java.util.Collection;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.io.Text;
@@ -34,8 +34,8 @@ import org.apache.hadoop.security.token.TokenSelector;
 public class TimelineDelegationTokenSelector
     implements TokenSelector<TimelineDelegationTokenIdentifier> {
 
-  private static final Log LOG = LogFactory
-      .getLog(TimelineDelegationTokenSelector.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(TimelineDelegationTokenSelector.class);
 
   @SuppressWarnings("unchecked")
   public Token<TimelineDelegationTokenIdentifier> selectToken(Text service,

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/security/ApplicationACLsManager.java

@@ -23,8 +23,8 @@ import java.util.Map.Entry;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.AccessControlException;
@@ -40,8 +40,8 @@ import com.google.common.annotations.VisibleForTesting;
 @InterfaceAudience.Private
 public class ApplicationACLsManager {
 
-  private static final Log LOG = LogFactory
-      .getLog(ApplicationACLsManager.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(ApplicationACLsManager.class);
 
   private static AccessControlList DEFAULT_YARN_APP_ACL 
     = new AccessControlList(YarnConfiguration.DEFAULT_YARN_APP_ACL);

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AbstractLivelinessMonitor.java

@@ -22,8 +22,8 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.service.AbstractService;
@@ -37,7 +37,8 @@ import org.apache.hadoop.service.AbstractService;
 @Evolving
 public abstract class AbstractLivelinessMonitor<O> extends AbstractService {
 
-  private static final Log LOG = LogFactory.getLog(AbstractLivelinessMonitor.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(AbstractLivelinessMonitor.class);
 
   //thread which runs periodically to see the last time since a heartbeat is
   //received.

+ 62 - 61
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AdHocLogDumper.java

@@ -18,25 +18,34 @@
 
 package org.apache.hadoop.yarn.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.log4j.AppenderSkeleton;
+import org.apache.log4j.FileAppender;
+import org.apache.log4j.Layout;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.PatternLayout;
+import org.apache.log4j.Priority;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
-import org.apache.log4j.*;
 
 import com.google.common.annotations.VisibleForTesting;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.*;
+import java.util.Enumeration;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Timer;
+import java.util.TimerTask;
 
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class AdHocLogDumper {
 
-  private static final Log LOG = LogFactory.getLog(AdHocLogDumper.class);
+  private static final Logger LOG =
+      LogManager.getLogger(AdHocLogDumper.class);
 
   private String name;
   private String targetFilename;
@@ -54,58 +63,53 @@ public class AdHocLogDumper {
 
   public void dumpLogs(String level, int timePeriod)
       throws YarnRuntimeException, IOException {
-    synchronized (lock) {
+    synchronized (lock){
       if (logFlag) {
         LOG.info("Attempt to dump logs when appender is already running");
         throw new YarnRuntimeException("Appender is already dumping logs");
       }
       Level targetLevel = Level.toLevel(level);
-      Log log = LogFactory.getLog(name);
+      Logger logger = LogManager.getLogger(name);
       appenderLevels.clear();
-      if (log instanceof Log4JLogger) {
-        Logger packageLogger = ((Log4JLogger) log).getLogger();
-        currentLogLevel = packageLogger.getLevel();
-        Level currentEffectiveLevel = packageLogger.getEffectiveLevel();
+      currentLogLevel = logger.getLevel();
+      Level currentEffectiveLevel = logger.getEffectiveLevel();
 
-        // make sure we can create the appender first
-        Layout layout = new PatternLayout("%d{ISO8601} %p %c: %m%n");
-        FileAppender fApp;
-        File file =
-            new File(System.getProperty("yarn.log.dir"), targetFilename);
-        try {
-          fApp = new FileAppender(layout, file.getAbsolutePath(), false);
-        } catch (IOException ie) {
-          LOG
-            .warn(
-              "Error creating file, can't dump logs to "
-                  + file.getAbsolutePath(), ie);
-          throw ie;
-        }
-        fApp.setName(AdHocLogDumper.AD_HOC_DUMPER_APPENDER);
-        fApp.setThreshold(targetLevel);
+      // make sure we can create the appender first
+      Layout layout = new PatternLayout("%d{ISO8601} %p %c: %m%n");
+      FileAppender fApp;
+      File file =
+          new File(System.getProperty("yarn.log.dir"), targetFilename);
+      try {
+        fApp = new FileAppender(layout, file.getAbsolutePath(), false);
+      } catch (IOException ie) {
+        LOG.warn("Error creating file, can't dump logs to "
+            + file.getAbsolutePath(), ie);
+        throw ie;
+      }
+      fApp.setName(AdHocLogDumper.AD_HOC_DUMPER_APPENDER);
+      fApp.setThreshold(targetLevel);
 
-        // get current threshold of all appenders and set it to the effective
-        // level
-        for (Enumeration appenders = Logger.getRootLogger().getAllAppenders(); appenders
-          .hasMoreElements();) {
-          Object obj = appenders.nextElement();
-          if (obj instanceof AppenderSkeleton) {
-            AppenderSkeleton appender = (AppenderSkeleton) obj;
-            appenderLevels.put(appender.getName(), appender.getThreshold());
-            appender.setThreshold(currentEffectiveLevel);
-          }
+      // get current threshold of all appenders and set it to the effective
+      // level
+      for (Enumeration appenders = Logger.getRootLogger().getAllAppenders();
+          appenders.hasMoreElements();) {
+        Object obj = appenders.nextElement();
+        if (obj instanceof AppenderSkeleton) {
+          AppenderSkeleton appender = (AppenderSkeleton) obj;
+          appenderLevels.put(appender.getName(), appender.getThreshold());
+          appender.setThreshold(currentEffectiveLevel);
         }
+      }
 
-        packageLogger.addAppender(fApp);
-        LOG.info("Dumping adhoc logs for " + name + " to "
-            + file.getAbsolutePath() + " for " + timePeriod + " milliseconds");
-        packageLogger.setLevel(targetLevel);
-        logFlag = true;
+      logger.addAppender(fApp);
+      LOG.info("Dumping adhoc logs for " + name + " to "
+          + file.getAbsolutePath() + " for " + timePeriod + " milliseconds");
+      logger.setLevel(targetLevel);
+      logFlag = true;
 
-        TimerTask restoreLogLevel = new RestoreLogLevel();
-        Timer restoreLogLevelTimer = new Timer();
-        restoreLogLevelTimer.schedule(restoreLogLevel, timePeriod);
-      }
+      TimerTask restoreLogLevel = new RestoreLogLevel();
+      Timer restoreLogLevelTimer = new Timer();
+      restoreLogLevelTimer.schedule(restoreLogLevel, timePeriod);
     }
   }
 
@@ -117,22 +121,19 @@ public class AdHocLogDumper {
   class RestoreLogLevel extends TimerTask {
     @Override
     public void run() {
-      Log log = LogFactory.getLog(name);
-      if (log instanceof Log4JLogger) {
-        Logger logger = ((Log4JLogger) log).getLogger();
-        logger.removeAppender(AD_HOC_DUMPER_APPENDER);
-        logger.setLevel(currentLogLevel);
-        for (Enumeration appenders = Logger.getRootLogger().getAllAppenders(); appenders
-          .hasMoreElements();) {
-          Object obj = appenders.nextElement();
-          if (obj instanceof AppenderSkeleton) {
-            AppenderSkeleton appender = (AppenderSkeleton) obj;
-            appender.setThreshold(appenderLevels.get(appender.getName()));
-          }
+      Logger logger = LogManager.getLogger(name);
+      logger.removeAppender(AD_HOC_DUMPER_APPENDER);
+      logger.setLevel(currentLogLevel);
+      for (Enumeration appenders = Logger.getRootLogger().getAllAppenders();
+          appenders.hasMoreElements();) {
+        Object obj = appenders.nextElement();
+        if (obj instanceof AppenderSkeleton) {
+          AppenderSkeleton appender = (AppenderSkeleton) obj;
+          appender.setThreshold(appenderLevels.get(appender.getName()));
         }
-        logFlag = false;
-        LOG.info("Done dumping adhoc logs for " + name);
       }
+      logFlag = false;
+      LOG.info("Done dumping adhoc logs for " + name);
     }
   }
 }
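
With the commons-logging wrapper removed, the rewritten dumpLogs() works against log4j 1.x directly (LogManager, Logger, FileAppender) instead of unwrapping a Log4JLogger first. A self-contained sketch of that log4j pattern, attach a temporary FileAppender, lower the level, then detach it; the logger name and file path are illustrative:

import java.io.IOException;

import org.apache.log4j.FileAppender;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;

public class AdHocDumpSketch {
  public static void main(String[] args) throws IOException {
    Logger logger = LogManager.getLogger("org.example.somepackage");
    FileAppender appender = new FileAppender(
        new PatternLayout("%d{ISO8601} %p %c: %m%n"), "/tmp/adhoc.log", false);
    appender.setName("adhoc-dumper");
    appender.setThreshold(Level.DEBUG);

    logger.addAppender(appender);   // start capturing
    logger.setLevel(Level.DEBUG);
    logger.debug("captured in /tmp/adhoc.log");

    logger.removeAppender("adhoc-dumper");  // restore; the patched class does this from a TimerTask
  }
}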

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java

@@ -32,8 +32,8 @@ import java.util.regex.Pattern;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
@@ -66,7 +66,8 @@ import org.apache.hadoop.yarn.exceptions.YarnException;
 @LimitedPrivate({"YARN", "MapReduce"})
 public class FSDownload implements Callable<Path> {
 
-  private static final Log LOG = LogFactory.getLog(FSDownload.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(FSDownload.class);
 
   private FileContext files;
   private final UserGroupInformation userUgi;

+ 9 - 9
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java

@@ -44,8 +44,8 @@ import org.apache.commons.io.filefilter.DirectoryFileFilter;
 import org.apache.commons.io.filefilter.RegexFileFilter;
 import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -61,8 +61,8 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 @InterfaceStability.Unstable
 public class ProcfsBasedProcessTree extends ResourceCalculatorProcessTree {
 
-  static final Log LOG = LogFactory
-      .getLog(ProcfsBasedProcessTree.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(ProcfsBasedProcessTree.class);
 
   private static final String PROCFS = "/proc/";
 
@@ -264,7 +264,7 @@ public class ProcfsBasedProcessTree extends ResourceCalculatorProcessTree {
         }
       }
 
-      LOG.debug(this);
+      LOG.debug(this.toString());
 
       if (smapsEnabled) {
         // Update smaps info
@@ -409,8 +409,8 @@ public class ProcfsBasedProcessTree extends ResourceCalculatorProcessTree {
                     + ", total : " + (total * KB_TO_BYTES));
               }
             }
+            LOG.debug(procMemInfo.toString());
           }
-          LOG.debug(procMemInfo);
         }
       }
     }
@@ -807,11 +807,11 @@ public class ProcfsBasedProcessTree extends ResourceCalculatorProcessTree {
         }
       }
     } catch (FileNotFoundException f) {
-      LOG.error(f);
+      LOG.error(f.toString());
     } catch (IOException e) {
-      LOG.error(e);
+      LOG.error(e.toString());
     } catch (Throwable t) {
-      LOG.error(t);
+      LOG.error(t.toString());
     } finally {
       IOUtils.closeQuietly(in);
     }
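
The explicit toString() calls above exist because slf4j's debug()/error() take a String where commons-logging accepted an Object. For reference, a short sketch of the slf4j idioms that express the same thing without the explicit conversion, parameterized logging plus the throwable overload that also prints the stack trace; the method and message are illustrative, and the patch keeps the toString() form:

import java.io.FileNotFoundException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ProcfsLoggingSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(ProcfsLoggingSketch.class);

  void onReadFailure(Object processTree, FileNotFoundException f) {
    LOG.debug("{}", processTree);                  // toString() runs only if DEBUG is enabled
    LOG.error("Failed to read procfs entry", f);   // message plus full stack trace
  }
}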

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java

@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.yarn.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -33,8 +33,8 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 @InterfaceAudience.LimitedPrivate({"YARN", "MAPREDUCE"})
 @InterfaceStability.Unstable
 public class ResourceCalculatorPlugin extends Configured {
-  private static final Log LOG =
-      LogFactory.getLog(ResourceCalculatorPlugin.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ResourceCalculatorPlugin.class);
 
   private final SysInfo sys;
 

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorProcessTree.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.yarn.util;
 
 import java.lang.reflect.Constructor;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
@@ -38,8 +38,8 @@ import org.apache.hadoop.yarn.exceptions.YarnException;
 @Public
 @Evolving
 public abstract class ResourceCalculatorProcessTree extends Configured {
-  static final Log LOG = LogFactory
-      .getLog(ResourceCalculatorProcessTree.class);
+  static final Logger LOG = LoggerFactory
+      .getLogger(ResourceCalculatorProcessTree.class);
   public static final int UNAVAILABLE = -1;
 
   /**

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Times.java

@@ -22,13 +22,14 @@ import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 
 @Private
 public class Times {
-  private static final Log LOG = LogFactory.getLog(Times.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(Times.class);
 
   static final String ISO8601DATEFORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ";
 

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/WindowsBasedProcessTree.java

@@ -23,8 +23,8 @@ import java.math.BigInteger;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.util.CpuTimeTracker;
 import org.apache.hadoop.util.Shell;
@@ -34,8 +34,8 @@ import org.apache.hadoop.util.StringUtils;
 @Private
 public class WindowsBasedProcessTree extends ResourceCalculatorProcessTree {
 
-  static final Log LOG = LogFactory
-      .getLog(WindowsBasedProcessTree.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(WindowsBasedProcessTree.class);
 
   static class ProcessInfo {
     String pid; // process pid

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/YarnVersionInfo.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.yarn.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -30,7 +30,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class YarnVersionInfo extends VersionInfo {
-  private static final Log LOG = LogFactory.getLog(YarnVersionInfo.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(YarnVersionInfo.class);
 
   private static YarnVersionInfo YARN_VERSION_INFO = new YarnVersionInfo();
 

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/resource/DefaultResourceCalculator.java

@@ -17,8 +17,8 @@
 */
 package org.apache.hadoop.yarn.util.resource;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.yarn.api.records.Resource;
@@ -26,8 +26,8 @@ import org.apache.hadoop.yarn.api.records.Resource;
 @Private
 @Unstable
 public class DefaultResourceCalculator extends ResourceCalculator {
-  private static final Log LOG =
-      LogFactory.getLog(DefaultResourceCalculator.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DefaultResourceCalculator.class);
 
   @Override
   public int compare(Resource unused, Resource lhs, Resource rhs,

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/resource/DominantResourceCalculator.java

@@ -17,8 +17,8 @@
 */
 package org.apache.hadoop.yarn.util.resource;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.yarn.api.records.Resource;
@@ -53,7 +53,8 @@ import java.util.Arrays;
 @Private
 @Unstable
 public class DominantResourceCalculator extends ResourceCalculator {
-  static final Log LOG = LogFactory.getLog(DominantResourceCalculator.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DominantResourceCalculator.class);
 
   public DominantResourceCalculator() {
   }

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/resource/Resources.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.yarn.util.resource;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
@@ -37,8 +37,8 @@ public class Resources {
 
   private enum RoundingDirection { UP, DOWN }
 
-  private static final Log LOG =
-      LogFactory.getLog(Resources.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(Resources.class);
 
   /**
    * Helper class to create a resource with a fixed value for all resource

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/GenericExceptionHandler.java

@@ -28,8 +28,8 @@ import javax.ws.rs.ext.ExceptionMapper;
 import javax.ws.rs.ext.Provider;
 import javax.xml.bind.UnmarshalException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.authorize.AuthorizationException;
@@ -44,8 +44,8 @@ import com.google.inject.Singleton;
 @Singleton
 @Provider
 public class GenericExceptionHandler implements ExceptionMapper<Exception> {
-  public static final Log LOG = LogFactory
-      .getLog(GenericExceptionHandler.class);
+  public static final Logger LOG = LoggerFactory
+      .getLogger(GenericExceptionHandler.class);
 
   private @Context
   HttpServletResponse response;

+ 5 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java

@@ -24,8 +24,8 @@ import java.net.SocketTimeoutException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.net.NetUtils;
@@ -79,7 +79,8 @@ import org.junit.Test;
  */
 public class TestContainerLaunchRPC {
 
-  static final Log LOG = LogFactory.getLog(TestContainerLaunchRPC.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestContainerLaunchRPC.class);
 
   private static final RecordFactory recordFactory = RecordFactoryProvider
       .getRecordFactory(null);
@@ -171,7 +172,7 @@ public class TestContainerLaunchRPC {
         // make the thread sleep to look like its not going to respond
         Thread.sleep(10000);
       } catch (Exception e) {
-        LOG.error(e);
+        LOG.error(e.toString());
         throw new YarnException(e);
       }
       throw new YarnException("Shouldn't happen!!");

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerResourceIncreaseRPC.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.yarn;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.net.NetUtils;
@@ -74,7 +74,7 @@ import java.util.List;
  */
 public class TestContainerResourceIncreaseRPC {
 
-  static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
       TestContainerResourceIncreaseRPC.class);
 
   @Test
@@ -188,7 +188,7 @@ public class TestContainerResourceIncreaseRPC {
         // make the thread sleep to look like its not going to respond
         Thread.sleep(10000);
       } catch (Exception e) {
-        LOG.error(e);
+        LOG.error(e.toString());
         throw new YarnException(e);
       }
       throw new YarnException("Shouldn't happen!!");

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/BasePBImplRecordsTest.java

@@ -21,8 +21,8 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import org.apache.commons.lang3.Range;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.yarn.api.resource.PlacementConstraint;
 import org.apache.hadoop.yarn.api.resource.PlacementConstraints;
 import org.junit.Assert;
@@ -40,7 +40,8 @@ import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.targetIn;
  * Generic helper class to validate protocol records.
  */
 public class BasePBImplRecordsTest {
-  static final Log LOG = LogFactory.getLog(BasePBImplRecordsTest.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(BasePBImplRecordsTest.class);
 
   @SuppressWarnings("checkstyle:visibilitymodifier")
   protected static HashMap<Type, Object> typeValueCache =

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java

@@ -27,8 +27,8 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.WeakHashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
 import org.junit.Assert;
@@ -36,8 +36,8 @@ import org.junit.Test;
 
 public class TestTimelineRecords {
 
-  private static final Log LOG =
-      LogFactory.getLog(TestTimelineRecords.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestTimelineRecords.class);
 
   @Test
   public void testEntities() throws Exception {

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.yarn.api.records.timelineservice;
 
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -36,8 +36,8 @@ import java.util.Map;
 
 
 public class TestTimelineServiceRecords {
-  private static final Log LOG =
-      LogFactory.getLog(TestTimelineServiceRecords.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestTimelineServiceRecords.class);
 
   @Test
   public void testTimelineEntities() throws Exception {

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientForATS1_5.java

@@ -30,8 +30,8 @@ import java.io.File;
 import java.io.IOException;
 import java.net.URI;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
@@ -52,8 +52,8 @@ import com.sun.jersey.api.client.ClientResponse;
 
 public class TestTimelineClientForATS1_5 {
 
-  protected static Log LOG = LogFactory
-    .getLog(TestTimelineClientForATS1_5.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(TestTimelineClientForATS1_5.class);
 
   private TimelineClientImpl client;
   private static FileContext localFS;

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java

@@ -30,8 +30,8 @@ import java.util.List;
 
 import javax.ws.rs.core.MultivaluedMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -50,8 +50,8 @@ import org.junit.Test;
 import org.junit.rules.TestName;
 
 public class TestTimelineClientV2Impl {
-  private static final Log LOG =
-      LogFactory.getLog(TestTimelineClientV2Impl.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestTimelineClientV2Impl.class);
   private TestV2TimelineClient client;
   private static final long TIME_TO_SLEEP = 150L;
   private static final String EXCEPTION_MSG = "Exception in the content";

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/event/InlineDispatcher.java

@@ -18,15 +18,16 @@
 
 package org.apache.hadoop.yarn.event;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.yarn.event.AsyncDispatcher;
 import org.apache.hadoop.yarn.event.Event;
 import org.apache.hadoop.yarn.event.EventHandler;
 
 @SuppressWarnings({"unchecked", "rawtypes"})
 public class InlineDispatcher extends AsyncDispatcher {
-  private static final Log LOG = LogFactory.getLog(InlineDispatcher.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(InlineDispatcher.class);
 
   private class TestEventHandler implements EventHandler {
     @Override

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java

@@ -38,8 +38,8 @@ import java.util.Collections;
 import java.util.concurrent.CountDownLatch;
 
 import org.junit.Assert;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileStatus;
@@ -70,8 +70,8 @@ public class TestAggregatedLogFormat {
   private static final Configuration conf = new Configuration();
   private static final FileSystem fs;
   private static final char filler = 'x';
-  private static final Log LOG = LogFactory
-      .getLog(TestAggregatedLogFormat.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(TestAggregatedLogFormat.class);
 
   static {
     try {

+ 17 - 14
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestAdHocLogDumper.java

@@ -18,14 +18,14 @@
 
 package org.apache.hadoop.yarn.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.util.Time;
 import org.apache.log4j.Appender;
 import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.Logger;
 import org.apache.log4j.Priority;
+import org.apache.log4j.LogManager;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -34,20 +34,22 @@ import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.Map;
 
+import static org.apache.hadoop.util.GenericsUtil.isLog4jLogger;
+
 public class TestAdHocLogDumper {
 
-  private static final Log LOG = LogFactory.getLog(TestAdHocLogDumper.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestAdHocLogDumper.class);
 
   @Test
   public void testDumpingSchedulerLogs() throws Exception {
 
     Map<Appender, Priority> levels = new HashMap<>();
-    String logHierarchy = TestAdHocLogDumper.class.getName();
     String logFilename = "test.log";
-    Log log = LogFactory.getLog(logHierarchy);
-    if (log instanceof Log4JLogger) {
-      for (Enumeration appenders = Logger.getRootLogger().getAllAppenders(); appenders
-        .hasMoreElements();) {
+    Logger logger = LoggerFactory.getLogger(TestAdHocLogDumper.class);
+    if (isLog4jLogger(this.getClass())) {
+      for (Enumeration appenders = LogManager.getRootLogger().
+          getAllAppenders(); appenders.hasMoreElements();) {
         Object obj = appenders.nextElement();
         if (obj instanceof AppenderSkeleton) {
           AppenderSkeleton appender = (AppenderSkeleton) obj;
@@ -56,7 +58,8 @@ public class TestAdHocLogDumper {
       }
     }
 
-    AdHocLogDumper dumper = new AdHocLogDumper(logHierarchy, logFilename);
+    AdHocLogDumper dumper = new AdHocLogDumper(this.getClass().getName(),
+        logFilename);
     dumper.dumpLogs("DEBUG", 1000);
     LOG.debug("test message 1");
     LOG.info("test message 2");
@@ -68,9 +71,9 @@ public class TestAdHocLogDumper {
     Assert.assertTrue(logFile.length() != 0);
 
     // make sure levels are set back to their original values
-    if (log instanceof Log4JLogger) {
-      for (Enumeration appenders = Logger.getRootLogger().getAllAppenders(); appenders
-        .hasMoreElements();) {
+    if (isLog4jLogger(this.getClass())) {
+      for (Enumeration appenders = LogManager.getRootLogger().
+          getAllAppenders(); appenders.hasMoreElements();) {
         Object obj = appenders.nextElement();
         if (obj instanceof AppenderSkeleton) {
           AppenderSkeleton appender = (AppenderSkeleton) obj;
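
The old instanceof Log4JLogger check becomes GenericsUtil.isLog4jLogger(Class), which reports whether the slf4j logger for that class is backed by log4j, so the log4j-specific appender manipulation only runs under a log4j binding. A small sketch of the guard; the class name is illustrative, and it assumes hadoop-common (GenericsUtil) and log4j 1.x on the classpath:

import org.apache.hadoop.util.GenericsUtil;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;

public class Log4jGuardSketch {
  public static void main(String[] args) {
    // Only touch log4j internals when the slf4j binding is actually log4j.
    if (GenericsUtil.isLog4jLogger(Log4jGuardSketch.class)) {
      LogManager.getLogger(Log4jGuardSketch.class).setLevel(Level.DEBUG);
    }
  }
}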

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java

@@ -58,8 +58,8 @@ import org.junit.Assert;
 
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -87,7 +87,8 @@ import com.google.common.cache.LoadingCache;
  */
 public class TestFSDownload {
 
-  private static final Log LOG = LogFactory.getLog(TestFSDownload.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestFSDownload.class);
   private static AtomicLong uniqueNumberGenerator =
     new AtomicLong(System.currentTimeMillis());
   private enum TEST_FILE_TYPE {

+ 19 - 13
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestLog4jWarningErrorMetricsAppender.java

@@ -18,14 +18,17 @@
 
 package org.apache.hadoop.yarn.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.Marker;
+import org.slf4j.MarkerFactory;
+import org.apache.log4j.LogManager;
 import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
+import org.apache.hadoop.util.Time;
 import org.junit.Assert;
 import org.junit.Test;
 
+
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -33,7 +36,10 @@ import java.util.Map;
 public class TestLog4jWarningErrorMetricsAppender {
 
   Log4jWarningErrorMetricsAppender appender;
-  Log logger = LogFactory.getLog(TestLog4jWarningErrorMetricsAppender.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestLog4jWarningErrorMetricsAppender.class);
+  private static final Marker FATAL =
+      MarkerFactory.getMarker("FATAL");
   List<Long> cutoff = new ArrayList<>();
 
   void setupAppender(int cleanupIntervalSeconds, long messageAgeLimitSeconds,
@@ -42,33 +48,33 @@ public class TestLog4jWarningErrorMetricsAppender {
     appender =
         new Log4jWarningErrorMetricsAppender(cleanupIntervalSeconds,
           messageAgeLimitSeconds, maxUniqueMessages);
-    Logger.getRootLogger().addAppender(appender);
+    LogManager.getRootLogger().addAppender(appender);
   }
 
   void removeAppender() {
-    Logger.getRootLogger().removeAppender(appender);
+    LogManager.getRootLogger().removeAppender(appender);
   }
 
   void logMessages(Level level, String message, int count) {
     for (int i = 0; i < count; ++i) {
       switch (level.toInt()) {
       case Level.FATAL_INT:
-        logger.fatal(message);
+        LOG.error(FATAL, message);
         break;
       case Level.ERROR_INT:
-        logger.error(message);
+        LOG.error(message);
         break;
       case Level.WARN_INT:
-        logger.warn(message);
+        LOG.warn(message);
         break;
       case Level.INFO_INT:
-        logger.info(message);
+        LOG.info(message);
         break;
       case Level.DEBUG_INT:
-        logger.debug(message);
+        LOG.debug(message);
         break;
       case Level.TRACE_INT:
-        logger.trace(message);
+        LOG.trace(message);
         break;
       }
     }
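
slf4j defines no fatal() method, so the FATAL branch above is mapped to error() tagged with an slf4j Marker named "FATAL", as the patch does via MarkerFactory. A minimal sketch of that mapping; the class and message are illustrative:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

public class FatalMarkerSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(FatalMarkerSketch.class);
  private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

  public static void main(String[] args) {
    LOG.error(FATAL, "unrecoverable condition");  // logged at ERROR, tagged FATAL
  }
}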

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java

@@ -38,8 +38,8 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileUtil;
@@ -61,8 +61,8 @@ import org.junit.Test;
  */
 public class TestProcfsBasedProcessTree {
 
-  private static final Log LOG = LogFactory
-    .getLog(TestProcfsBasedProcessTree.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(TestProcfsBasedProcessTree.class);
   protected static File TEST_ROOT_DIR = new File("target",
     TestProcfsBasedProcessTree.class.getName() + "-localDir");
 

+ 4 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestRackResolver.java

@@ -24,8 +24,8 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.net.DNSToSwitchMapping;
@@ -37,7 +37,8 @@ import org.junit.Test;
 
 public class TestRackResolver {
 
-  private static Log LOG = LogFactory.getLog(TestRackResolver.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestRackResolver.class);
   private static final String invalidHost = "invalidHost";
 
   @Before

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.yarn.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -27,8 +27,8 @@ import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.assertTrue;
 
 public class TestWindowsBasedProcessTree {
-  private static final Log LOG = LogFactory
-      .getLog(TestWindowsBasedProcessTree.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(TestWindowsBasedProcessTree.class);
 
   class WindowsBasedProcessTreeTester extends WindowsBasedProcessTree {
     String infoStr = null;