
HADOOP-15552. Move logging APIs over to slf4j in hadoop-tools - Part2. Contributed by Ian Pickering.

Akira Ajisaka 6 years ago
parent
commit
3e3963b035
91 changed files with 282 additions and 280 deletions
  1. +2 -2  hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractRenameTest.java
  2. +1 -1  hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractSeekTest.java
  3. +3 -3  hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractFSContractTestBase.java
  4. +1 -1  hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMetadataConsistency.java
  5. +3 -3  hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSInputStream.java
  6. +3 -3  hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java
  7. +4 -4  hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
  8. +3 -3  hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
  9. +1 -1  hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractGetFileStatus.java
 10. +1 -1  hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AContractGetFileStatusV1List.java
 11. +3 -3  hadoop-tools/hadoop-datajoin/src/main/java/org/apache/hadoop/contrib/utils/join/JobBase.java
 12. +3 -3  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
 13. +3 -3  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
 14. +3 -3  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/GlobbedCopyListing.java
 15. +3 -3  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java
 16. +4 -4  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/RegexCopyFilter.java
 17. +5 -5  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
 18. +3 -3  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
 19. +4 -4  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
 20. +4 -4  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/UniformSizeInputFormat.java
 21. +4 -4  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputChunk.java
 22. +3 -3  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputChunkContext.java
 23. +3 -3  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputFormat.java
 24. +3 -3  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicRecordReader.java
 25. +3 -3  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
 26. +3 -3  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/ProducerConsumer.java
 27. +3 -3  hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/RetriableCommand.java
 28. +3 -3  hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java
 29. +4 -4  hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpSystem.java
 30. +3 -3  hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
 31. +3 -3  hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestExternalCall.java
 32. +3 -3  hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestFileBasedCopyListing.java
 33. +3 -3  hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java
 34. +1 -1  hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/contract/AbstractContractDistCpTest.java
 35. +3 -3  hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyCommitter.java
 36. +3 -3  hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
 37. +3 -3  hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyOutputFormat.java
 38. +3 -3  hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/lib/TestDynamicInputFormat.java
 39. +3 -3  hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java
 40. +4 -4  hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestThrottledInputStream.java
 41. +3 -3  hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java
 42. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ClusterSummarizer.java
 43. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/CompressionEmulationUtil.java
 44. +4 -4  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/DistributedCacheEmulator.java
 45. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/EchoUserResolver.java
 46. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java
 47. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/FilePool.java
 48. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJob.java
 49. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/InputStriper.java
 50. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/JobFactory.java
 51. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/JobMonitor.java
 52. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/JobSubmitter.java
 53. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/LoadJob.java
 54. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/RandomTextDataGenerator.java
 55. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ReplayJobFactory.java
 56. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/RoundRobinUserResolver.java
 57. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/SerialJobFactory.java
 58. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/SleepJob.java
 59. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/Statistics.java
 60. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/StressJobFactory.java
 61. +3 -3  hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/SubmitterUserResolver.java
 62. +3 -3  hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/CommonJobTest.java
 63. +3 -3  hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/DebugJobProducer.java
 64. +3 -3  hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/GridmixTestUtils.java
 65. +3 -3  hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestFilePool.java
 66. +3 -3  hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestFileQueue.java
 67. +3 -3  hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridMixClasses.java
 68. +3 -3  hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixRecord.java
 69. +3 -3  hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestRecordFactory.java
 70. +4 -4  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DeskewedJobTraceReader.java
 71. +3 -3  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Folder.java
 72. +3 -3  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
 73. +3 -3  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HistoryEventEmitter.java
 74. +3 -3  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
 75. +3 -3  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedJob.java
 76. +3 -3  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedTask.java
 77. +3 -3  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedTaskAttempt.java
 78. +3 -3  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java
 79. +4 -4  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/TraceBuilder.java
 80. +3 -3  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ZombieJob.java
 81. +3 -3  hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/resourcemanager/MockAMLauncher.java
 82. +3 -3  hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthJob.java
 83. +3 -3  hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java
 84. +9 -8  hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapRed.java
 85. +3 -2  hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java
 86. +3 -3  hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java
 87. +4 -4  hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/mapreduce/StreamBaseRecordReader.java
 88. +3 -3  hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
 89. +3 -3  hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlMultipleRecords.java
 90. +4 -4  hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
 91. +3 -3  hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/UtilTest.java
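
Every file diff below applies the same mechanical substitution: the commons-logging Log/LogFactory pair is swapped for slf4j's Logger/LoggerFactory. A minimal before/after sketch of the pattern, using a hypothetical Example class that is not part of this patch:

    // Before: commons-logging (removed by this commit)
    //   import org.apache.commons.logging.Log;
    //   import org.apache.commons.logging.LogFactory;
    //
    //   private static final Log LOG = LogFactory.getLog(Example.class);

    // After: slf4j
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Example {
      private static final Logger LOG =
          LoggerFactory.getLogger(Example.class);

      void demo(Exception e) {
        // slf4j expands {} placeholders only when the level is enabled, and a
        // trailing Throwable argument is logged with its stack trace.
        LOG.debug("expected: {}", e.getMessage(), e);
      }
    }

The remaining non-import changes fall out of this swap: the getLog() accessor of the contract-test base class becomes getLogger(), IOUtils.cleanup(Log, ...) call sites move to IOUtils.cleanupWithLogger, and objects that used to be passed directly as log messages now go through "{}" placeholders.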

+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractRenameTest.java

@@ -69,7 +69,7 @@ public abstract class AbstractContractRenameTest extends
       } else {
         // at least one FS only returns false here, if that is the case
         // warn but continue
-        getLog().warn("Rename returned {} renaming a nonexistent file", renamed);
+        getLogger().warn("Rename returned {} renaming a nonexistent file", renamed);
         assertFalse("Renaming a missing file returned true", renamed);
       }
     } catch (FileNotFoundException e) {
@@ -118,7 +118,7 @@ public abstract class AbstractContractRenameTest extends
         if (renamed && !renameReturnsFalseOnRenameDestExists) {
           //expected an exception
           String destDirLS = generateAndLogErrorListing(srcFile, destFile);
-          getLog().error("dest dir {}", destDirLS);
+          getLogger().error("dest dir {}", destDirLS);
           fail("expected rename(" + srcFile + ", " + destFile + " ) to fail," +
                " but got success and destination of " + destDirLS);
         }

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractSeekTest.java

@@ -132,7 +132,7 @@ public abstract class AbstractContractSeekTest extends AbstractFSContractTestBas
   @Test
   public void testSeekReadClosedFile() throws Throwable {
     instream = getFileSystem().open(smallSeekFile);
-    getLog().debug(
+    getLogger().debug(
       "Stream is of type " + instream.getClass().getCanonicalName());
     instream.close();
     try {

+ 3 - 3
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractFSContractTestBase.java

@@ -110,7 +110,7 @@ public abstract class AbstractFSContractTestBase extends Assert
    * Get the log of the base class.
    * @return a logger
    */
-  public static Logger getLog() {
+  public static Logger getLogger() {
     return LOG;
   }
 
@@ -281,7 +281,7 @@ public abstract class AbstractFSContractTestBase extends Assert
    * @param e exception raised.
    */
   protected void handleExpectedException(Exception e) {
-    getLog().debug("expected :{}" ,e, e);
+    getLogger().debug("expected :{}" ,e, e);
   }
 
   /**
@@ -366,7 +366,7 @@ public abstract class AbstractFSContractTestBase extends Assert
   protected String generateAndLogErrorListing(Path src, Path dst) throws
                                                                   IOException {
     FileSystem fs = getFileSystem();
-    getLog().error(
+    getLogger().error(
       "src dir " + ContractTestUtils.ls(fs, src.getParent()));
     String destDirLS = ContractTestUtils.ls(fs, dst.getParent());
     if (fs.isDirectory(dst)) {
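
(Note: the static accessor above already returned an slf4j Logger before this commit; it is renamed from getLog() to getLogger(), presumably to match the type, and the one-line getLog() -> getLogger() edits in the surrounding contract-test diffs are the resulting call-site updates.)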

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMetadataConsistency.java

@@ -177,4 +177,4 @@ public class TestNameNodeMetadataConsistency {
       }
     }, SCAN_WAIT * 1000, 60000);
   }
-}
+}

+ 3 - 3
hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSInputStream.java

@@ -25,8 +25,8 @@ import java.util.Queue;
 import java.util.concurrent.ExecutorService;
 
 import com.google.common.util.concurrent.MoreExecutors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSExceptionMessages;
 import org.apache.hadoop.fs.FSInputStream;
@@ -40,7 +40,7 @@ import static org.apache.hadoop.fs.aliyun.oss.Constants.*;
  * stream.
  */
 public class AliyunOSSInputStream extends FSInputStream {
-  public static final Log LOG = LogFactory.getLog(AliyunOSSInputStream.class);
+  public static final Logger LOG = LoggerFactory.getLogger(AliyunOSSInputStream.class);
   private final long downloadPartSize;
   private AliyunOSSFileSystemStore store;
   private final String key;

+ 3 - 3
hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java

@@ -27,8 +27,8 @@ import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.io.output.FileWriterWithEncoding;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -70,7 +70,7 @@ import java.util.Set;
  * {@link HadoopArchiveLogsRunner}.
  */
 public class HadoopArchiveLogs implements Tool {
-  private static final Log LOG = LogFactory.getLog(HadoopArchiveLogs.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HadoopArchiveLogs.class);
 
   private static final String HELP_OPTION = "help";
   private static final String MAX_ELIGIBLE_APPS_OPTION = "maxEligibleApps";

+ 4 - 4
hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java

@@ -25,8 +25,8 @@ import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -47,8 +47,8 @@ import java.security.PrivilegedExceptionAction;
  * tool via the Distributed Shell.  It's not meant to be run directly.
  */
 public class HadoopArchiveLogsRunner implements Tool {
-  private static final Log LOG =
-      LogFactory.getLog(HadoopArchiveLogsRunner.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HadoopArchiveLogsRunner.class);
 
   private static final String APP_ID_OPTION = "appId";
   private static final String USER_OPTION = "user";

+ 3 - 3
hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java

@@ -37,8 +37,8 @@ import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.Parser;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -82,7 +82,7 @@ import com.google.common.base.Charsets;
  */
 public class HadoopArchives implements Tool {
   public static final int VERSION = 3;
-  private static final Log LOG = LogFactory.getLog(HadoopArchives.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HadoopArchives.class);
   
   private static final String NAME = "har"; 
   private static final String ARCHIVE_NAME = "archiveName";

+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractGetFileStatus.java

@@ -41,7 +41,7 @@ public class ITestS3AContractGetFileStatus
 
   @Override
   public void teardown() throws Exception {
-    getLog().info("FS details {}", getFileSystem());
+    getLogger().info("FS details {}", getFileSystem());
     super.teardown();
   }
 

+ 1 - 1
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AContractGetFileStatusV1List.java

@@ -41,7 +41,7 @@ public class ITestS3AContractGetFileStatusV1List
 
   @Override
   public void teardown() throws Exception {
-    getLog().info("FS details {}", getFileSystem());
+    getLogger().info("FS details {}", getFileSystem());
     super.teardown();
   }
 

+ 3 - 3
hadoop-tools/hadoop-datajoin/src/main/java/org/apache/hadoop/contrib/utils/join/JobBase.java

@@ -23,8 +23,8 @@ import java.util.TreeMap;
 import java.util.Map.Entry;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.Reducer;
@@ -36,7 +36,7 @@ import org.apache.hadoop.mapred.Reducer;
  */
 public abstract class JobBase implements Mapper, Reducer {
 
-  public static final Log LOG = LogFactory.getLog("datajoin.job");
+  public static final Logger LOG = LoggerFactory.getLogger("datajoin.job");
 
   private SortedMap<Object, Long> longCounters = null;
 

+ 3 - 3
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java

@@ -27,8 +27,8 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.tools.util.DistCpUtils;
 import org.apache.hadoop.security.Credentials;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.lang.reflect.Constructor;
@@ -48,7 +48,7 @@ import com.google.common.collect.Sets;
 public abstract class CopyListing extends Configured {
 
   private Credentials credentials;
-  static final Log LOG = LogFactory.getLog(DistCp.class);
+  static final Logger LOG = LoggerFactory.getLogger(DistCp.class);
   /**
    * Build listing function creates the input listing that distcp uses to
    * perform the copy.

+ 3 - 3
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java

@@ -22,8 +22,8 @@ import java.io.IOException;
 import java.util.Random;
 
 import com.google.common.base.Preconditions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -64,7 +64,7 @@ public class DistCp extends Configured implements Tool {
    */
   static final int SHUTDOWN_HOOK_PRIORITY = 30;
 
-  static final Log LOG = LogFactory.getLog(DistCp.class);
+  static final Logger LOG = LoggerFactory.getLogger(DistCp.class);
 
   @VisibleForTesting
   DistCpContext context;

+ 3 - 3
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/GlobbedCopyListing.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.tools;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -35,7 +35,7 @@ import java.util.ArrayList;
  * listing-file by "globbing" all specified source paths (wild-cards and all.)
  */
 public class GlobbedCopyListing extends CopyListing {
-  private static final Log LOG = LogFactory.getLog(GlobbedCopyListing.class);
+  private static final Logger LOG = LoggerFactory.getLogger(GlobbedCopyListing.class);
 
   private final CopyListing simpleListing;
   /**

+ 3 - 3
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java

@@ -29,8 +29,8 @@ import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 
 import com.google.common.base.Preconditions;
@@ -41,7 +41,7 @@ import com.google.common.base.Preconditions;
  */
 public class OptionsParser {
 
-  static final Log LOG = LogFactory.getLog(OptionsParser.class);
+  static final Logger LOG = LoggerFactory.getLogger(OptionsParser.class);
 
   private static final Options cliOptions = new Options();
 

+ 4 - 4
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/RegexCopyFilter.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.tools;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
 
@@ -43,7 +43,7 @@ import com.google.common.annotations.VisibleForTesting;
  */
 public class RegexCopyFilter extends CopyFilter {
 
-  private static final Log LOG = LogFactory.getLog(RegexCopyFilter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RegexCopyFilter.class);
   private File filtersFile;
   private List<Pattern> filters;
 
@@ -77,7 +77,7 @@ public class RegexCopyFilter extends CopyFilter {
       LOG.error("An error occurred while attempting to read from " +
           filtersFile);
     } finally {
-      IOUtils.cleanup(LOG, reader);
+      IOUtils.cleanupWithLogger(LOG, reader);
     }
   }
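
(Note: IOUtils.cleanup is typed against the commons-logging Log, so call sites whose LOG field is now an slf4j Logger switch to the IOUtils.cleanupWithLogger overload; both variants close each stream, logging rather than propagating any close-time IOException through the supplied logger.)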
 

+ 5 - 5
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.tools;
 
 import com.google.common.collect.Lists;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileStatus;
@@ -60,7 +60,7 @@ import static org.apache.hadoop.tools.DistCpConstants
  * Note: The SimpleCopyListing doesn't handle wild-cards in the input-paths.
  */
 public class SimpleCopyListing extends CopyListing {
-  private static final Log LOG = LogFactory.getLog(SimpleCopyListing.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SimpleCopyListing.class);
 
   public static final int DEFAULT_FILE_STATUS_SIZE = 1000;
   public static final boolean DEFAULT_RANDOMIZE_FILE_LISTING = true;
@@ -309,7 +309,7 @@ public class SimpleCopyListing extends CopyListing {
       fileListWriter.close();
       fileListWriter = null;
     } finally {
-      IOUtils.cleanup(LOG, fileListWriter);
+      IOUtils.cleanupWithLogger(LOG, fileListWriter);
     }
   }
 
@@ -402,7 +402,7 @@ public class SimpleCopyListing extends CopyListing {
       LOG.info("Build file listing completed.");
       fileListWriter = null;
     } finally {
-      IOUtils.cleanup(LOG, fileListWriter);
+      IOUtils.cleanupWithLogger(LOG, fileListWriter);
     }
   }
 

+ 3 - 3
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java

@@ -23,8 +23,8 @@ import java.io.IOException;
 import java.util.EnumSet;
 
 import org.apache.commons.lang3.exception.ExceptionUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
@@ -74,7 +74,7 @@ public class CopyMapper extends Mapper<Text, CopyListingFileStatus, Text, Text>
     OVERWRITE,    // Overwrite the whole file
   }
 
-  private static Log LOG = LogFactory.getLog(CopyMapper.class);
+  private static Logger LOG = LoggerFactory.getLogger(CopyMapper.class);
 
   private Configuration conf;
 

+ 4 - 4
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java

@@ -23,8 +23,8 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.util.EnumSet;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -53,7 +53,7 @@ import com.google.common.annotations.VisibleForTesting;
  */
 public class RetriableFileCopyCommand extends RetriableCommand {
 
-  private static Log LOG = LogFactory.getLog(RetriableFileCopyCommand.class);
+  private static Logger LOG = LoggerFactory.getLogger(RetriableFileCopyCommand.class);
   private boolean skipCrc = false;
   private FileAction action;
 
@@ -297,7 +297,7 @@ public class RetriableFileCopyCommand extends RetriableCommand {
       outStream.close();
       outStream = null;
     } finally {
-      IOUtils.cleanup(LOG, outStream, inStream);
+      IOUtils.cleanupWithLogger(LOG, outStream, inStream);
     }
     return totalBytesRead;
   }

+ 4 - 4
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/UniformSizeInputFormat.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.tools.mapred;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.IOUtils;
@@ -50,8 +50,8 @@ import java.util.ArrayList;
  */
 public class UniformSizeInputFormat
     extends InputFormat<Text, CopyListingFileStatus> {
-  private static final Log LOG
-                = LogFactory.getLog(UniformSizeInputFormat.class);
+  private static final Logger LOG
+                = LoggerFactory.getLogger(UniformSizeInputFormat.class);
 
   /**
    * Implementation of InputFormat::getSplits(). Returns a list of InputSplits,

+ 4 - 4
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputChunk.java

@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.tools.mapred.lib;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
@@ -42,7 +42,7 @@ import java.io.IOException;
  * consumed.
  */
 class DynamicInputChunk<K, V> {
-  private static Log LOG = LogFactory.getLog(DynamicInputChunk.class);
+  private static Logger LOG = LoggerFactory.getLogger(DynamicInputChunk.class);
   private Path chunkFilePath;
   private SequenceFileRecordReader<K, V> reader;
   private SequenceFile.Writer writer;
@@ -78,7 +78,7 @@ class DynamicInputChunk<K, V> {
    * Closes streams opened to the chunk-file.
    */
   public void close() {
-    IOUtils.cleanup(LOG, reader, writer);
+    IOUtils.cleanupWithLogger(LOG, reader, writer);
   }
 
   /**

+ 3 - 3
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputChunkContext.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.tools.mapred.lib;
 
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.Log;
+import org.slf4j.LoggerFactory;
+import org.slf4j.Logger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
@@ -34,7 +34,7 @@ import java.io.IOException;
  */
 class DynamicInputChunkContext<K, V> {
 
-  private static Log LOG = LogFactory.getLog(DynamicInputChunkContext.class);
+  private static Logger LOG = LoggerFactory.getLogger(DynamicInputChunkContext.class);
   private Configuration configuration;
   private Path chunkRootPath = null;
   private String chunkFilePrefix;

+ 3 - 3
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputFormat.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.tools.mapred.lib;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.tools.DistCpConstants;
@@ -49,7 +49,7 @@ import java.io.IOException;
  * performance characteristics. 
  */
 public class DynamicInputFormat<K, V> extends InputFormat<K, V> {
-  private static final Log LOG = LogFactory.getLog(DynamicInputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DynamicInputFormat.class);
 
   private static final String CONF_LABEL_LISTING_SPLIT_RATIO
           = "mapred.listing.split.ratio";

+ 3 - 3
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicRecordReader.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.tools.mapred.lib;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.tools.util.DistCpUtils;
 import org.apache.hadoop.tools.DistCpConstants;
 import org.apache.hadoop.mapreduce.*;
@@ -37,7 +37,7 @@ import java.util.concurrent.TimeUnit;
  *    transparently.
  */
 public class DynamicRecordReader<K, V> extends RecordReader<K, V> {
-  private static final Log LOG = LogFactory.getLog(DynamicRecordReader.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DynamicRecordReader.class);
   private TaskAttemptContext taskAttemptContext;
   private Configuration configuration;
   private DynamicInputChunk<K, V> chunk;

+ 3 - 3
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.tools.util;
 
 import com.google.common.collect.Maps;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FileChecksum;
@@ -56,7 +56,7 @@ import java.util.Map.Entry;
  */
 public class DistCpUtils {
 
-  private static final Log LOG = LogFactory.getLog(DistCpUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DistCpUtils.class);
 
   /**
    * Retrieves size of the file at the specified path.

+ 3 - 3
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/ProducerConsumer.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.tools.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -34,7 +34,7 @@ import java.util.concurrent.atomic.AtomicInteger;
  * WorkReport{@literal <R>} to the outputQueue.
  */
 public class ProducerConsumer<T, R> {
-  private Log LOG = LogFactory.getLog(ProducerConsumer.class);
+  private Logger LOG = LoggerFactory.getLogger(ProducerConsumer.class);
   private LinkedBlockingQueue<WorkRequest<T>> inputQueue;
   private LinkedBlockingQueue<WorkReport<R>> outputQueue;
   private ExecutorService executor;

+ 3 - 3
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/RetriableCommand.java

@@ -19,8 +19,8 @@
 
 package org.apache.hadoop.tools.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
 import org.apache.hadoop.io.retry.RetryPolicies;
@@ -35,7 +35,7 @@ import java.util.concurrent.TimeUnit;
  */
 public abstract class RetriableCommand {
 
-  private static Log LOG = LogFactory.getLog(RetriableCommand.class);
+  private static Logger LOG = LoggerFactory.getLogger(RetriableCommand.class);
 
   private static final long DELAY_MILLISECONDS = 500;
   private static final int  MAX_RETRIES        = 3;

+ 3 - 3
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.tools;
 
 import static org.mockito.Mockito.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.conf.Configuration;
@@ -51,7 +51,7 @@ import java.util.Random;
 
 @RunWith(value = Parameterized.class)
 public class TestCopyListing extends SimpleCopyListing {
-  private static final Log LOG = LogFactory.getLog(TestCopyListing.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestCopyListing.class);
 
   private static final Credentials CREDENTIALS = new Credentials();
 

+ 4 - 4
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpSystem.java

@@ -31,8 +31,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -57,8 +57,8 @@ import org.junit.rules.Timeout;
  */
 
 public class TestDistCpSystem {
-  private static final Log LOG =
-      LogFactory.getLog(TestDistCpSystem.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestDistCpSystem.class);
 
   @Rule
   public Timeout globalTimeout = new Timeout(30000);

+ 3 - 3
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java

@@ -18,9 +18,9 @@
 
 package org.apache.hadoop.tools;
 
-import org.apache.commons.logging.Log;
+import org.slf4j.Logger;
 import org.apache.hadoop.fs.viewfs.*;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -37,7 +37,7 @@ import java.net.URI;
 import java.net.URISyntaxException;
 
 public class TestDistCpViewFs {
-  private static final Log LOG = LogFactory.getLog(TestDistCpViewFs.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestDistCpViewFs.class);
 
   private static FileSystem fs;
 

+ 3 - 3
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestExternalCall.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.tools;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -37,7 +37,7 @@ import java.security.Permission;
 
 public class TestExternalCall {
 
-  private static final Log LOG = LogFactory.getLog(TestExternalCall.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestExternalCall.class);
 
   private static FileSystem fs;
 

+ 3 - 3
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestFileBasedCopyListing.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.tools;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -40,7 +40,7 @@ import java.util.HashMap;
 import java.util.Map;
 
 public class TestFileBasedCopyListing {
-  private static final Log LOG = LogFactory.getLog(TestFileBasedCopyListing.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestFileBasedCopyListing.class);
 
   private static final Credentials CREDENTIALS = new Credentials();
 

+ 3 - 3
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.tools;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -43,7 +43,7 @@ import java.util.List;
 
 @RunWith(value = Parameterized.class)
 public class TestIntegration {
-  private static final Log LOG = LogFactory.getLog(TestIntegration.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestIntegration.class);
 
   private static FileSystem fs;
 

+ 1 - 1
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/contract/AbstractContractDistCpTest.java

@@ -523,7 +523,7 @@ public abstract class AbstractContractDistCpTest
     int fileSizeKb = conf.getInt(SCALE_TEST_DISTCP_FILE_SIZE_KB,
         DEFAULT_DISTCP_SIZE_KB);
     int fileSizeMb = fileSizeKb / 1024;
-    getLog().info("{} with file size {}", testName.getMethodName(), fileSizeMb);
+    getLogger().info("{} with file size {}", testName.getMethodName(), fileSizeMb);
     byte[] data1 = dataset((fileSizeMb + 1) * MB, 33, 43);
     createFile(srcFS, largeFile1, true, data1);
     byte[] data2 = dataset((fileSizeMb + 2) * MB, 43, 53);

+ 3 - 3
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyCommitter.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.tools.mapred;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -47,7 +47,7 @@ import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
 import static org.apache.hadoop.tools.util.TestDistCpUtils.*;
 
 public class TestCopyCommitter {
-  private static final Log LOG = LogFactory.getLog(TestCopyCommitter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestCopyCommitter.class);
 
   private static final Random rand = new Random();
 

+ 3 - 3
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java

@@ -27,8 +27,8 @@ import java.util.EnumSet;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -62,7 +62,7 @@ import static org.apache.hadoop.test.MetricsAsserts.getLongCounter;
 import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
 
 public class TestCopyMapper {
-  private static final Log LOG = LogFactory.getLog(TestCopyMapper.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestCopyMapper.class);
   private static List<Path> pathList = new ArrayList<Path>();
   private static int nFiles = 0;
   private static final int DEFAULT_FILE_SIZE = 1024;

+ 3 - 3
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyOutputFormat.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.tools.mapred;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 import org.apache.hadoop.mapreduce.task.JobContextImpl;
@@ -32,7 +32,7 @@ import org.junit.Assert;
 import java.io.IOException;
 
 public class TestCopyOutputFormat {
-  private static final Log LOG = LogFactory.getLog(TestCopyOutputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestCopyOutputFormat.class);
 
   @Test
   public void testSetCommitDirectory() {

+ 3 - 3
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/lib/TestDynamicInputFormat.java

@@ -21,8 +21,8 @@ package org.apache.hadoop.tools.mapred.lib;
 import org.apache.hadoop.tools.DistCpConstants;
 import org.apache.hadoop.tools.DistCpContext;
 import org.junit.Assert;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -46,7 +46,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 public class TestDynamicInputFormat {
-  private static final Log LOG = LogFactory.getLog(TestDynamicInputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestDynamicInputFormat.class);
   private static MiniDFSCluster cluster;
   private static final int N_FILES = 1000;
   private static final int NUM_SPLITS = 7;

+ 3 - 3
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.tools.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -50,7 +50,7 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 public class TestDistCpUtils {
-  private static final Log LOG = LogFactory.getLog(TestDistCpUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestDistCpUtils.class);
 
   private static final Configuration config = new Configuration();
   private static MiniDFSCluster cluster;

+ 4 - 4
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestThrottledInputStream.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.tools.util;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.IOUtils;
 import org.junit.Assert;
 import org.junit.Test;
@@ -27,7 +27,7 @@ import org.junit.Test;
 import java.io.*;
 
 public class TestThrottledInputStream {
-  private static final Log LOG = LogFactory.getLog(TestThrottledInputStream.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestThrottledInputStream.class);
   private static final int BUFF_SIZE = 1024;
 
   private enum CB {ONE_C, BUFFER, BUFF_OFFSET}
@@ -89,7 +89,7 @@ public class TestThrottledInputStream {
         copyByteByByte(in, out);
       }
 
-      LOG.info(in);
+      LOG.info("{}", in);
       bandwidth = in.getBytesPerSec();
       Assert.assertEquals(in.getTotalBytesRead(), tmpFile.length());
       Assert.assertTrue(in.getBytesPerSec() > maxBandwidth / (factor * 1.2));
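
(Note: commons-logging's Log.info(Object) accepts an arbitrary object as the message, but slf4j's Logger.info takes a format String, so the stream is passed through a "{}" placeholder instead and its toString() is invoked only when INFO is enabled.)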

+ 3 - 3
hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java

@@ -27,8 +27,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -40,7 +40,7 @@ import org.apache.hadoop.mapred.JobConf;
  * An abstract class for distributed tool for file related operations.
  */
 abstract class DistTool implements org.apache.hadoop.util.Tool {
-  protected static final Log LOG = LogFactory.getLog(DistTool.class);
+  protected static final Logger LOG = LoggerFactory.getLogger(DistTool.class);
 
   protected JobConf jobconf;
 

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ClusterSummarizer.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.mapred.gridmix;
 
 import org.apache.commons.lang3.time.FastDateFormat;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FileSystem;
@@ -40,7 +40,7 @@ import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
  * addresses are also recorded in the summary.
  */
 class ClusterSummarizer implements StatListener<ClusterStats> {
-  static final Log LOG = LogFactory.getLog(ClusterSummarizer.class);
+  static final Logger LOG = LoggerFactory.getLogger(ClusterSummarizer.class);
   
   private int numBlacklistedTrackers;
   private int numActiveTrackers;

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/CompressionEmulationUtil.java

@@ -25,8 +25,8 @@ import java.nio.charset.Charset;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -58,7 +58,7 @@ import org.apache.hadoop.util.StringUtils;
  * This is a utility class for all the compression related modules.
  */
 class CompressionEmulationUtil {
-  static final Log LOG = LogFactory.getLog(CompressionEmulationUtil.class);
+  static final Logger LOG = LoggerFactory.getLogger(CompressionEmulationUtil.class);
   
   /**
    * Enable compression usage in GridMix runs.

+ 4 - 4
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/DistributedCacheEmulator.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.mapred.gridmix;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -83,8 +83,8 @@ import java.util.Map;
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 class DistributedCacheEmulator {
-  private static final Log LOG =
-      LogFactory.getLog(DistributedCacheEmulator.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DistributedCacheEmulator.class);
 
   static final long AVG_BYTES_PER_MAP = 128 * 1024 * 1024L;// 128MB
 

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/EchoUserResolver.java

@@ -22,14 +22,14 @@ import java.net.URI;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Echos the UGI offered.
  */
 public class EchoUserResolver implements UserResolver {
-  public static final Log LOG = LogFactory.getLog(Gridmix.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Gridmix.class);
 
   public EchoUserResolver() {
     LOG.info(" Current user resolver is EchoUserResolver ");

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.mapred.gridmix;
 import java.io.IOException;
 
 import org.apache.commons.lang3.time.FastDateFormat;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -47,7 +47,7 @@ import org.apache.hadoop.util.StringUtils;
  * </ul>
  */
 class ExecutionSummarizer implements StatListener<JobStats> {
-  static final Log LOG = LogFactory.getLog(ExecutionSummarizer.class);
+  static final Logger LOG = LoggerFactory.getLogger(ExecutionSummarizer.class);
   private static final FastDateFormat UTIL = FastDateFormat.getInstance();
   
   private int numJobsInInputTrace;

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/FilePool.java

@@ -37,8 +37,8 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.mapred.gridmix.RandomAlgorithms.Selector;
 
 /**
@@ -47,7 +47,7 @@ import org.apache.hadoop.mapred.gridmix.RandomAlgorithms.Selector;
  */
 class FilePool {
 
-  public static final Log LOG = LogFactory.getLog(FilePool.class);
+  public static final Logger LOG = LoggerFactory.getLogger(FilePool.class);
 
   /**
    * The minimum file size added to the pool. Default 128MiB.

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJob.java

@@ -49,8 +49,8 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.tools.rumen.JobStory;
 import static org.apache.hadoop.tools.rumen.datatypes.util.MapReduceJobPropertiesParser.extractMaxHeapOpts;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Synthetic job generated from a trace description.
@@ -59,7 +59,7 @@ abstract class GridmixJob implements Callable<Job>, Delayed {
 
   // Gridmix job name format is GRIDMIX<6 digit sequence number>
   public static final String JOB_NAME_PREFIX = "GRIDMIX";
-  public static final Log LOG = LogFactory.getLog(GridmixJob.class);
+  public static final Logger LOG = LoggerFactory.getLogger(GridmixJob.class);
 
   private static final ThreadLocal<Formatter> nameFormat =
     new ThreadLocal<Formatter>() {

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/InputStriper.java

@@ -33,15 +33,15 @@ import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.apache.hadoop.mapreduce.lib.input.CombineFileSplit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Given a {@link #FilePool}, obtain a set of files capable of satisfying
  * a full set of splits, then iterate over each source to fill the request.
  */
 class InputStriper {
-  public static final Log LOG = LogFactory.getLog(InputStriper.class);
+  public static final Logger LOG = LoggerFactory.getLogger(InputStriper.class);
   int idx;
   long currentStart;
   FileStatus current;

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/JobFactory.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.mapred.gridmix;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
@@ -52,7 +52,7 @@ import java.util.concurrent.atomic.AtomicInteger;
  */
 abstract class JobFactory<T> implements Gridmix.Component<Void>,StatListener<T> {
 
-  public static final Log LOG = LogFactory.getLog(JobFactory.class);
+  public static final Logger LOG = LoggerFactory.getLogger(JobFactory.class);
 
   protected final Path scratch;
   protected final float rateFactor;

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/JobMonitor.java

@@ -29,8 +29,8 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.mapred.gridmix.Statistics.JobStats;
 import org.apache.hadoop.mapreduce.Job;
@@ -54,7 +54,7 @@ import org.apache.hadoop.mapreduce.JobStatus;
  */
 class JobMonitor implements Gridmix.Component<JobStats> {
 
-  public static final Log LOG = LogFactory.getLog(JobMonitor.class);
+  public static final Logger LOG = LoggerFactory.getLogger(JobMonitor.class);
 
   private final Queue<JobStats> mJobs;
   private ExecutorService executor;

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/JobSubmitter.java

@@ -26,8 +26,8 @@ import java.util.concurrent.Semaphore;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.mapred.gridmix.Statistics.JobStats;
 
 /**
@@ -39,7 +39,7 @@ import org.apache.hadoop.mapred.gridmix.Statistics.JobStats;
  */
 class JobSubmitter implements Gridmix.Component<GridmixJob> {
 
-  public static final Log LOG = LogFactory.getLog(JobSubmitter.class);
+  public static final Logger LOG = LoggerFactory.getLogger(JobSubmitter.class);
 
   private final Semaphore sem;
   private final Statistics statistics;

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/LoadJob.java

@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.mapred.gridmix;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
@@ -54,7 +54,7 @@ import java.util.Random;
  */
 class LoadJob extends GridmixJob {
 
-  public static final Log LOG = LogFactory.getLog(LoadJob.class);
+  public static final Logger LOG = LoggerFactory.getLogger(LoadJob.class);
 
   public LoadJob(final Configuration conf, long submissionMillis, 
                  final JobStory jobdesc, Path outRoot, UserGroupInformation ugi,

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/RandomTextDataGenerator.java

@@ -22,15 +22,15 @@ import java.util.List;
 import java.util.Random;
 
 import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 
 /**
  * A random text generator. The words are simply sequences of alphabets.
  */
 class RandomTextDataGenerator {
-  static final Log LOG = LogFactory.getLog(RandomTextDataGenerator.class);
+  static final Logger LOG = LoggerFactory.getLogger(RandomTextDataGenerator.class);
   
   /**
    * Configuration key for random text data generator's list size.

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ReplayJobFactory.java

@@ -23,15 +23,15 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.tools.rumen.JobStory;
 import org.apache.hadoop.tools.rumen.JobStoryProducer;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
  class ReplayJobFactory extends JobFactory<Statistics.ClusterStats> {
-  public static final Log LOG = LogFactory.getLog(ReplayJobFactory.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ReplayJobFactory.class);
 
   /**
    * Creating a new instance does not start the thread.

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/RoundRobinUserResolver.java

@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.mapred.gridmix;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -34,7 +34,7 @@ import java.util.HashMap;
 import java.util.List;
 
 public class RoundRobinUserResolver implements UserResolver {
-  public static final Log LOG = LogFactory.getLog(RoundRobinUserResolver.class);
+  public static final Logger LOG = LoggerFactory.getLogger(RoundRobinUserResolver.class);
 
   private int uidx = 0;
   private List<UserGroupInformation> users = Collections.emptyList();

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/SerialJobFactory.java

@@ -24,8 +24,8 @@ import org.apache.hadoop.tools.rumen.JobStory;
 import org.apache.hadoop.tools.rumen.JobStoryProducer;
 import org.apache.hadoop.mapred.gridmix.Statistics.JobStats;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.concurrent.CountDownLatch;
@@ -33,7 +33,7 @@ import java.util.concurrent.locks.Condition;
 
 public class SerialJobFactory extends JobFactory<JobStats> {
 
-  public static final Log LOG = LogFactory.getLog(SerialJobFactory.class);
+  public static final Logger LOG = LoggerFactory.getLogger(SerialJobFactory.class);
   private final Condition jobCompleted = lock.newCondition();
 
   /**

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/SleepJob.java

@@ -50,12 +50,12 @@ import org.apache.hadoop.tools.rumen.JobStory;
 import org.apache.hadoop.tools.rumen.ReduceTaskAttemptInfo;
 import org.apache.hadoop.tools.rumen.TaskAttemptInfo;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.util.Time;
 
 public class SleepJob extends GridmixJob {
-  public static final Log LOG = LogFactory.getLog(SleepJob.class);
+  public static final Logger LOG = LoggerFactory.getLogger(SleepJob.class);
   private static final ThreadLocal <Random> rand = 
     new ThreadLocal <Random> () {
         @Override protected Random initialValue() {

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/Statistics.java

@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.mapred.gridmix;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobClient;
@@ -50,7 +50,7 @@ import java.util.concurrent.locks.ReentrantLock;
  * notified either on every job completion event or some fixed time interval.
  */
 public class Statistics implements Component<Statistics.JobStats> {
-  public static final Log LOG = LogFactory.getLog(Statistics.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Statistics.class);
 
   private final StatCollector statistics = new StatCollector();
   private JobClient cluster;

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/StressJobFactory.java

@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.mapred.gridmix;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
@@ -38,7 +38,7 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 public class StressJobFactory extends JobFactory<Statistics.ClusterStats> {
-  public static final Log LOG = LogFactory.getLog(StressJobFactory.class);
+  public static final Logger LOG = LoggerFactory.getLogger(StressJobFactory.class);
 
   private final LoadStatus loadStatus = new LoadStatus();
   /**

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/SubmitterUserResolver.java

@@ -21,14 +21,14 @@ import java.io.IOException;
 import java.net.URI;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Resolves all UGIs to the submitting user.
  */
 public class SubmitterUserResolver implements UserResolver {
-  public static final Log LOG = LogFactory.getLog(SubmitterUserResolver.class);
+  public static final Logger LOG = LoggerFactory.getLogger(SubmitterUserResolver.class);
   
   private UserGroupInformation ugi = null;
 

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/CommonJobTest.java

@@ -33,8 +33,8 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileStatus;
@@ -57,7 +57,7 @@ import org.apache.hadoop.tools.rumen.JobStory;
 import org.apache.hadoop.util.ToolRunner;
 
 public class CommonJobTest {
-  public static final Log LOG = LogFactory.getLog(Gridmix.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Gridmix.class);
 
   protected static int NJOBS = 2;
   protected static final long GENDATA = 1; // in megabytes

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/DebugJobProducer.java

@@ -40,12 +40,12 @@ import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.TimeUnit;
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 public class DebugJobProducer implements JobStoryProducer {
-  public static final Log LOG = LogFactory.getLog(DebugJobProducer.class);
+  public static final Logger LOG = LoggerFactory.getLogger(DebugJobProducer.class);
   final ArrayList<JobStory> submitted;
   private final Configuration conf;
   private final AtomicInteger numJobs;

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/GridmixTestUtils.java

@@ -16,8 +16,8 @@
  */
 package org.apache.hadoop.mapred.gridmix;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -35,7 +35,7 @@ import java.io.IOException;
  * This is a test class.
  */
 public class GridmixTestUtils {
-  private static final Log LOG = LogFactory.getLog(GridmixTestUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(GridmixTestUtils.class);
   static final Path DEST = new Path("/gridmix");
   static FileSystem dfs = null;
   static MiniDFSCluster dfsCluster = null;

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestFilePool.java

@@ -28,8 +28,8 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import static org.junit.Assert.*;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
@@ -40,7 +40,7 @@ import org.apache.hadoop.mapreduce.lib.input.CombineFileSplit;
 
 public class TestFilePool {
 
-  static final Log LOG = LogFactory.getLog(TestFileQueue.class);
+  static final Logger LOG = LoggerFactory.getLogger(TestFileQueue.class);
   static final int NFILES = 26;
   static final Path base = getBaseDir();
 

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestFileQueue.java

@@ -26,8 +26,8 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import static org.junit.Assert.*;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -36,7 +36,7 @@ import org.apache.hadoop.mapreduce.lib.input.CombineFileSplit;
 
 public class TestFileQueue {
 
-  static final Log LOG = LogFactory.getLog(TestFileQueue.class);
+  static final Logger LOG = LoggerFactory.getLogger(TestFileQueue.class);
   static final int NFILES = 4;
   static final int BLOCK = 256;
   static final Path[] paths = new Path[NFILES];

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridMixClasses.java

@@ -30,8 +30,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CountDownLatch;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.CustomOutputCommitter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -85,7 +85,7 @@ import static org.mockito.Mockito.*;
 import static org.junit.Assert.*;
 
 public class TestGridMixClasses {
-  private static final Log LOG = LogFactory.getLog(TestGridMixClasses.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestGridMixClasses.class);
 
   /*
    * simple test LoadSplit (getters,copy, write, read...)

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixRecord.java

@@ -23,8 +23,8 @@ import java.util.Random;
 
 import org.junit.Test;
 import static org.junit.Assert.*;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
@@ -32,7 +32,7 @@ import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableUtils;
 
 public class TestGridmixRecord {
-  private static final Log LOG = LogFactory.getLog(TestGridmixRecord.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestGridmixRecord.class);
 
   static void lengthTest(GridmixRecord x, GridmixRecord y, int min,
       int max) throws Exception {

+ 3 - 3
hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestRecordFactory.java

@@ -22,14 +22,14 @@ import java.util.Random;
 
 import org.junit.Test;
 import static org.junit.Assert.*;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.DataOutputBuffer;
 
 public class TestRecordFactory {
-  private static final Log LOG = LogFactory.getLog(TestRecordFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestRecordFactory.class);
 
   public static void testFactory(long targetBytes, long targetRecs)
       throws Exception {

+ 4 - 4
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DeskewedJobTraceReader.java

@@ -27,8 +27,8 @@ import java.util.PriorityQueue;
 import java.util.TreeMap;
 import java.util.TreeSet;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class DeskewedJobTraceReader implements Closeable {
   // underlying engine
@@ -57,8 +57,8 @@ public class DeskewedJobTraceReader implements Closeable {
 
   private final PriorityQueue<LoggedJob> skewBuffer;
 
-  static final private Log LOG =
-      LogFactory.getLog(DeskewedJobTraceReader.class);
+  static final private Logger LOG =
+      LoggerFactory.getLogger(DeskewedJobTraceReader.class);
 
   static private class JobComparator implements Comparator<LoggedJob>, 
   Serializable {

+ 3 - 3
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Folder.java

@@ -30,8 +30,8 @@ import java.util.Queue;
 import java.util.Random;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -52,7 +52,7 @@ public class Folder extends Configured implements Tool {
   private int skewBufferLength = 0;
   private long startsAfter = -1;
 
-  static final private Log LOG = LogFactory.getLog(Folder.class);
+  static final private Logger LOG = LoggerFactory.getLogger(Folder.class);
 
   private DeskewedJobTraceReader reader = null;
   private Outputter<LoggedJob> outGen = null;

+ 3 - 3
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java

@@ -36,8 +36,8 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
@@ -139,7 +139,7 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
   private Histogram successfulNthReducerAttempts;
   private Histogram mapperLocality;
 
-  static final private Log LOG = LogFactory.getLog(HadoopLogsAnalyzer.class);
+  static final private Logger LOG = LoggerFactory.getLogger(HadoopLogsAnalyzer.class);
 
   private int[] attemptTimesPercentiles;
 

+ 3 - 3
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HistoryEventEmitter.java

@@ -22,13 +22,13 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Queue;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent;
 
 abstract class HistoryEventEmitter {
-  static final private Log LOG = LogFactory.getLog(HistoryEventEmitter.class);
+  static final private Logger LOG = LoggerFactory.getLogger(HistoryEventEmitter.class);
 
   abstract List<SingleEventEmitter> nonFinalSEEs();
 

+ 3 - 3
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java

@@ -26,8 +26,8 @@ import java.util.StringTokenizer;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.mapred.TaskStatus;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.TaskType;
@@ -69,7 +69,7 @@ public class JobBuilder {
   private static final long BYTES_IN_MEG =
       StringUtils.TraditionalBinaryPrefix.string2long("1m");
 
-  static final private Log LOG = LogFactory.getLog(JobBuilder.class);
+  static final private Logger LOG = LoggerFactory.getLogger(JobBuilder.class);
   
   private String jobID;
 

+ 3 - 3
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedJob.java

@@ -25,8 +25,8 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.security.authorize.AccessControlList;
 
@@ -37,7 +37,7 @@ import org.apache.hadoop.security.authorize.AccessControlList;
  */
 public class ParsedJob extends LoggedJob {
 
-  private static final Log LOG = LogFactory.getLog(ParsedJob.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ParsedJob.class);
 
   private Map<String, Long> totalCountersMap = new HashMap<String, Long>();
   private Map<String, Long> mapCountersMap = new HashMap<String, Long>();

+ 3 - 3
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedTask.java

@@ -22,8 +22,8 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
 
 /**
@@ -33,7 +33,7 @@ import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
  */
 public class ParsedTask extends LoggedTask {
 
-  private static final Log LOG = LogFactory.getLog(ParsedTask.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ParsedTask.class);
 
   private String diagnosticInfo;
   private String failedDueToAttempt;

+ 3 - 3
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedTaskAttempt.java

@@ -21,8 +21,8 @@ package org.apache.hadoop.tools.rumen;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
 
 /**
@@ -32,7 +32,7 @@ import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
  */
 public class ParsedTaskAttempt extends LoggedTaskAttempt {
 
-  private static final Log LOG = LogFactory.getLog(ParsedTaskAttempt.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ParsedTaskAttempt.class);
 
   private String diagnosticInfo;
   private String trackerName;

+ 3 - 3
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java

@@ -21,8 +21,8 @@ import java.nio.charset.Charset;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The purpose of this class is to generate new random seeds from a master
@@ -42,7 +42,7 @@ import org.apache.commons.logging.LogFactory;
  * http://www.iro.umontreal.ca/~lecuyer/myftp/streams00/
  */
 public class RandomSeedGenerator {
-  private static Log LOG = LogFactory.getLog(RandomSeedGenerator.class);
+  private static Logger LOG = LoggerFactory.getLogger(RandomSeedGenerator.class);
   private static final Charset UTF_8 = Charset.forName("UTF-8");
   
   /** MD5 algorithm instance, one for each thread. */

+ 4 - 4
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/TraceBuilder.java

@@ -30,8 +30,8 @@ import java.util.Properties;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;
@@ -49,7 +49,7 @@ import org.apache.hadoop.util.ToolRunner;
  * The main driver of the Rumen Parser.
  */
 public class TraceBuilder extends Configured implements Tool {
-  static final private Log LOG = LogFactory.getLog(TraceBuilder.class);
+  static final private Logger LOG = LoggerFactory.getLogger(TraceBuilder.class);
 
   static final int RUN_METHOD_FAILED_EXIT_CODE = 3;
 
@@ -310,6 +310,6 @@ public class TraceBuilder extends Configured implements Tool {
   }
 
   void finish() {
-    IOUtils.cleanup(LOG, traceWriter, topologyWriter);
+    IOUtils.cleanupWithLogger(LOG, traceWriter, topologyWriter);
   }
 }

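TraceBuilder needs one change beyond the import swap: IOUtils.cleanup takes a commons-logging Log, so finish() moves to IOUtils.cleanupWithLogger, which accepts an slf4j Logger and otherwise behaves the same. A sketch of the call as used here:

  // org.apache.hadoop.io.IOUtils:
  //   cleanup(org.apache.commons.logging.Log, java.io.Closeable...)    (old)
  //   cleanupWithLogger(org.slf4j.Logger, java.io.Closeable...)        (replacement)
  // Both close each Closeable in turn, logging rather than propagating
  // any IOException raised by close().
  IOUtils.cleanupWithLogger(LOG, traceWriter, topologyWriter);
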
+ 3 - 3
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ZombieJob.java

@@ -23,8 +23,8 @@ import java.util.Map;
 import java.util.Random;
 import java.util.HashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TaskStatus.State;
@@ -49,7 +49,7 @@ import org.apache.hadoop.tools.rumen.Pre21JobHistoryConstants.Values;
  */
 @SuppressWarnings("deprecation")
 public class ZombieJob implements JobStory {
-  static final Log LOG = LogFactory.getLog(ZombieJob.class);
+  static final Logger LOG = LoggerFactory.getLogger(ZombieJob.class);
   private final LoggedJob job;
   private Map<TaskID, LoggedTask> loggedTaskMap;
   private Map<TaskAttemptID, LoggedTaskAttempt> loggedTaskAttemptMap;

+ 3 - 3
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/resourcemanager/MockAMLauncher.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.yarn.sls.resourcemanager;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -42,7 +42,7 @@ import java.util.Map;
 
 public class MockAMLauncher extends ApplicationMasterLauncher
     implements EventHandler<AMLauncherEvent> {
-  private static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
       MockAMLauncher.class);
 
   Map<String, AMSimulator> amMap;

+ 3 - 3
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthJob.java

@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.yarn.sls.synthetic;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.commons.math3.random.JDKRandomGenerator;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.JobConf;
@@ -51,7 +51,7 @@ import static org.apache.hadoop.mapreduce.MRJobConfig.QUEUE_NAME;
 public class SynthJob implements JobStory {
 
   @SuppressWarnings("StaticVariableName")
-  private static Log LOG = LogFactory.getLog(SynthJob.class);
+  private static Logger LOG = LoggerFactory.getLogger(SynthJob.class);
 
   private static final long MIN_MEMORY = 1024;
   private static final long MIN_VCORES = 1;

+ 3 - 3
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java

@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.yarn.sls.synthetic;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.commons.math3.distribution.AbstractRealDistribution;
 import org.apache.commons.math3.random.JDKRandomGenerator;
 import org.apache.hadoop.conf.Configuration;
@@ -53,7 +53,7 @@ import static org.codehaus.jackson.map.DeserializationConfig.Feature.FAIL_ON_UNK
 public class SynthTraceJobProducer implements JobStoryProducer {
 
   @SuppressWarnings("StaticVariableName")
-  private static final Log LOG = LogFactory.getLog(SynthTraceJobProducer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SynthTraceJobProducer.class);
 
   private final Configuration conf;
   private final AtomicInteger numJobs;

+ 9 - 8
hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapRed.java

@@ -24,7 +24,8 @@ import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.Properties;
 
-import org.apache.commons.logging.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
@@ -44,7 +45,7 @@ import org.apache.hadoop.io.Text;
  */
 public abstract class PipeMapRed {
 
-  protected static final Log LOG = LogFactory.getLog(PipeMapRed.class.getName());
+  protected static final Logger LOG = LoggerFactory.getLogger(PipeMapRed.class.getName());
 
   /**
    * Returns the Configuration.
@@ -397,7 +398,7 @@ public abstract class PipeMapRed {
         }
       } catch (Throwable th) {
         outerrThreadsThrowable = th;
-        LOG.warn(th);
+        LOG.warn("{}", th);
       } finally {
         try {
           if (clientIn_ != null) {
@@ -405,7 +406,7 @@ public abstract class PipeMapRed {
             clientIn_ = null;
           }
         } catch (IOException io) {
-          LOG.info(io);
+          LOG.info("{}", io);
         }
       }
     }
@@ -466,7 +467,7 @@ public abstract class PipeMapRed {
         }
       } catch (Throwable th) {
         outerrThreadsThrowable = th;
-        LOG.warn(th);
+        LOG.warn("{}", th);
         try {
           if (lineReader != null) {
             lineReader.close();
@@ -476,7 +477,7 @@ public abstract class PipeMapRed {
             clientErr_ = null;
           }
         } catch (IOException io) {
-          LOG.info(io);
+          LOG.info("{}", io);
         }
       }
     }
@@ -531,13 +532,13 @@ public abstract class PipeMapRed {
           clientOut_.flush();
           clientOut_.close();
         } catch (IOException io) {
-          LOG.warn(io);
+          LOG.warn("{}", io);
         }
       }
       try {
         waitOutputThreads();
       } catch (IOException io) {
-        LOG.warn(io);
+        LOG.warn("{}", io);
       }
       if (sim != null) sim.destroy();
       LOG.info("mapRedFinished");

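PipeMapRed is the one file here needing more than an import swap: slf4j has no warn(Throwable)/info(Throwable) overloads, so the patch routes each exception through a {} placeholder, which logs the throwable's toString() but not its stack trace. Where the trace matters, the usual slf4j idiom is a message plus the throwable as the trailing argument; a hedged sketch (the message text is illustrative, not from the patch):

  } catch (IOException io) {
    // a trailing Throwable with no matching {} placeholder is treated
    // as the exception to log, stack trace included
    LOG.warn("error closing client input", io);
  }
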
+ 3 - 2
hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java

@@ -30,7 +30,8 @@ import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.commons.logging.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** 
  * Shared functionality for hadoopStreaming formats.
@@ -40,7 +41,7 @@ import org.apache.commons.logging.*;
  */
 public abstract class StreamBaseRecordReader implements RecordReader<Text, Text> {
 
-  protected static final Log LOG = LogFactory.getLog(StreamBaseRecordReader.class.getName());
+  protected static final Logger LOG = LoggerFactory.getLogger(StreamBaseRecordReader.class.getName());
 
   // custom JobConf properties for this class are prefixed with this namespace
   final static String CONF_NS = "stream.recordreader.";

+ 3 - 3
hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java

@@ -36,8 +36,8 @@ import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.mapreduce.MRConfig;
@@ -79,7 +79,7 @@ import static org.apache.hadoop.util.RunJar.MATCH_ANY;
  */
 public class StreamJob implements Tool {
 
-  protected static final Log LOG = LogFactory.getLog(StreamJob.class.getName());
+  protected static final Logger LOG = LoggerFactory.getLogger(StreamJob.class.getName());
   final static String REDUCE_NONE = "NONE";
 
   /** -----------Streaming CLI Implementation  **/

+ 4 - 4
hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/mapreduce/StreamBaseRecordReader.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.streaming.mapreduce;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -40,8 +40,8 @@ import org.apache.hadoop.streaming.StreamUtil;
  */
 public abstract class StreamBaseRecordReader extends RecordReader<Text, Text> {
 
-  protected static final Log LOG = LogFactory
-      .getLog(StreamBaseRecordReader.class.getName());
+  protected static final Logger LOG = LoggerFactory
+      .getLogger(StreamBaseRecordReader.class.getName());
 
   // custom JobConf properties for this class are prefixed with this namespace
   final static String CONF_NS = "stream.recordreader.";

+ 3 - 3
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java

@@ -25,8 +25,8 @@ import java.util.Map;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipOutputStream;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -42,7 +42,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
  */
 public class TestMultipleArchiveFiles extends TestStreaming
 {
-  private static final Log LOG = LogFactory.getLog(TestMultipleArchiveFiles.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestMultipleArchiveFiles.class);
 
   private StreamJob job;
   private String INPUT_DIR = "multiple-archive-files/";

+ 3 - 3
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlMultipleRecords.java

@@ -23,8 +23,8 @@ import java.io.IOException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.junit.Before;
 import org.junit.Test;
@@ -38,7 +38,7 @@ import org.junit.Test;
  */
 public class TestStreamXmlMultipleRecords extends TestStreaming
 {
-  private static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
       TestStreamXmlMultipleRecords.class);
 
   private boolean hasPerl = false;

+ 4 - 4
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java

@@ -31,8 +31,8 @@ import java.util.List;
 import java.util.Properties;
 import java.util.StringTokenizer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
@@ -51,8 +51,8 @@ import static org.junit.Assert.assertTrue;
 public class TestStreamingBadRecords extends ClusterMapReduceTestCase
 {
 
-  private static final Log LOG = 
-    LogFactory.getLog(TestStreamingBadRecords.class);
+  private static final Logger LOG =
+    LoggerFactory.getLogger(TestStreamingBadRecords.class);
   
   private static final List<String> MAPPER_BAD_RECORDS = 
     Arrays.asList("hey022","hey023","hey099");

+ 3 - 3
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/UtilTest.java

@@ -26,13 +26,13 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 
 class UtilTest {
 
-  private static final Log LOG = LogFactory.getLog(UtilTest.class);
+  private static final Logger LOG = LoggerFactory.getLogger(UtilTest.class);
 
   /**
    * Utility routine to recursively delete a directory.