
MAPREDUCE-6026. native-task: fix logging. Contributed by Manu Zhang.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1617878 13f79535-47bb-0310-9956-ffa450edef68
Todd Lipcon
commit 808bf8bac1
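
The change replaces org.apache.log4j.Logger and direct System.out/System.err calls with commons-logging, and adds a log4j.properties to the test classpath so test logging has somewhere to go. A minimal sketch of the pattern applied throughout the patch (the class name ExamplePlatform and the load() method are illustrative only, not part of the patch):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class ExamplePlatform {
  // Obtain a commons-logging Log instead of an org.apache.log4j.Logger
  private static final Log LOG = LogFactory.getLog(ExamplePlatform.class);

  public void load() {
    // Progress messages go through LOG.info rather than System.out.println
    LOG.info("initializing native platform");
    try {
      // ... native library loading would happen here ...
    } catch (final Exception e) {
      // Failures are logged with the exception attached instead of e.printStackTrace()
      LOG.error("load nativetask lib failed, Native-Task Delegation is disabled", e);
    }
  }
}
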

+ 1 - 0
hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt

@@ -13,3 +13,4 @@ MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common (Binglin Chang)
 MAPREDUCE-5976. native-task: should not fail to build if snappy is missing (Manu Zhang)
 MAPREDUCE-5978. native-task: remove test case for not supported codec Bzip2Codec and DefaultCodec (Manu Zhang)
 MAPREDUCE-6006. native-task: add native tests to maven and fix bug in pom.xml (Binglin Chang via todd)
+MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)

+ 3 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java

@@ -19,6 +19,8 @@ package org.apache.hadoop.mapred.nativetask;
 
 import java.io.IOException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.ByteWritable;
 import org.apache.hadoop.io.BytesWritable;
@@ -33,10 +35,9 @@ import org.apache.hadoop.io.VLongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.nativetask.serde.*;
-import org.apache.log4j.Logger;
 
 public class HadoopPlatform extends Platform {
-  private static final Logger LOG = Logger.getLogger(HadoopPlatform.class);
+  private static final Log LOG = LogFactory.getLog(HadoopPlatform.class);
 
   public HadoopPlatform() throws IOException {
   }

+ 3 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/Platforms.java

@@ -20,11 +20,12 @@ package org.apache.hadoop.mapred.nativetask;
 import java.io.IOException;
 import java.util.ServiceLoader;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.nativetask.serde.INativeSerializer;
 import org.apache.hadoop.mapred.nativetask.serde.NativeSerialization;
-import org.apache.log4j.Logger;
 
 
 /**
@@ -34,7 +35,7 @@ import org.apache.log4j.Logger;
  */
 public class Platforms {
 
-  private static final Logger LOG = Logger.getLogger(Platforms.class);
+  private static final Log LOG = LogFactory.getLog(Platforms.class);
   private static final ServiceLoader<Platform> platforms = ServiceLoader.load(Platform.class);
   
   public static void init(Configuration conf) throws IOException {

+ 4 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/LargeKVCombinerTest.java

@@ -19,6 +19,8 @@ package org.apache.hadoop.mapred.nativetask.combinertest;
 
 import static org.junit.Assert.assertEquals;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -34,6 +36,7 @@ import org.apache.hadoop.mapreduce.Job;
 import org.junit.Test;
 
 public class LargeKVCombinerTest {
+  private static final Log LOG = LogFactory.getLog(LargeKVCombinerTest.class);
 
   @Test
   public void testLargeValueCombiner(){
@@ -57,7 +60,7 @@ public class LargeKVCombinerTest {
         int max = i;
         int min = Math.max(i / 4, max - 10);
         
-        System.out.println("===KV Size Test: min size: " + min + ", max size: " + max);
+        LOG.info("===KV Size Test: min size: " + min + ", max size: " + max);
         
         normalConf.set(TestConstants.NATIVETASK_KVSIZE_MIN, String.valueOf(min));
         normalConf.set(TestConstants.NATIVETASK_KVSIZE_MAX, String.valueOf(max));

+ 6 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVTest.java

@@ -23,6 +23,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -37,6 +39,8 @@ import org.junit.runners.Parameterized.Parameters;
 
 @RunWith(Parameterized.class)
 public class KVTest {
+  private static final Log LOG = LogFactory.getLog(KVTest.class);
+
   private static Class<?>[] keyclasses = null;
   private static Class<?>[] valueclasses = null;
   private static String[] keyclassNames = null;
@@ -53,7 +57,7 @@ public class KVTest {
   public static Iterable<Class<?>[]> data() {
     final String valueclassesStr = nativekvtestconf
         .get(TestConstants.NATIVETASK_KVTEST_VALUECLASSES);
-    System.out.println(valueclassesStr);
+    LOG.info(valueclassesStr);
     valueclassNames = valueclassesStr.replaceAll("\\s", "").split(";");// delete
     // " "
     final ArrayList<Class<?>> tmpvalueclasses = new ArrayList<Class<?>>();
@@ -69,7 +73,7 @@ public class KVTest {
     }
     valueclasses = tmpvalueclasses.toArray(new Class[tmpvalueclasses.size()]);
     final String keyclassesStr = nativekvtestconf.get(TestConstants.NATIVETASK_KVTEST_KEYCLASSES);
-    System.out.println(keyclassesStr);
+    LOG.info(keyclassesStr);
     keyclassNames = keyclassesStr.replaceAll("\\s", "").split(";");// delete
     // " "
     final ArrayList<Class<?>> tmpkeyclasses = new ArrayList<Class<?>>();

+ 5 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/LargeKVTest.java

@@ -21,6 +21,8 @@ import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -32,6 +34,7 @@ import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
 import org.junit.Test;
 
 public class LargeKVTest {
+  private static final Log LOG = LogFactory.getLog(LargeKVTest.class);
 
   @Test
   public void testKeySize() {
@@ -69,8 +72,8 @@ public class LargeKVTest {
         normalConf.set(TestConstants.NATIVETASK_KVSIZE_MIN, String.valueOf(min));
         normalConf.set(TestConstants.NATIVETASK_KVSIZE_MAX, String.valueOf(max));
 
-        System.out.println("===KV Size Test: min size: " + min + ", max size: " + max + ", keyClass: "
-            + keyClass.getName() + ", valueClass: " + valueClass.getName());
+        LOG.info("===KV Size Test: min size: " + min + ", max size: " + max + ", keyClass: "
+          + keyClass.getName() + ", valueClass: " + valueClass.getName());
 
         final String nativeOutPut = runNativeLargeKVTest("Test Large Value Size:" + String.valueOf(i), keyClass,
             valueClass, nativeConf);

+ 6 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/TestInputFile.java

@@ -21,6 +21,8 @@ import java.io.IOException;
 import java.util.HashMap;
 import java.util.Random;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -40,7 +42,8 @@ import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
 
 
 public class TestInputFile {
-	
+	private static Log LOG = LogFactory.getLog(TestInputFile.class);
+
   public static class KVSizeScope {
     private static final int DefaultMinNum = 1;
     private static final int DefaultMaxNum = 64;
@@ -120,8 +123,8 @@ public class TestInputFile {
   }
   
   public void createSequenceTestFile(String filepath, int base,  byte start) throws Exception {
-    System.out.println("create file " + filepath);
-    System.out.println(keyClsName + " " + valueClsName);
+    LOG.info("create file " + filepath);
+    LOG.info(keyClsName + " " + valueClsName);
     Class<?> tmpkeycls, tmpvaluecls;
     try {
       tmpkeycls = Class.forName(keyClsName);

+ 4 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/EnforceNativeOutputCollectorDelegator.java

@@ -19,9 +19,12 @@ package org.apache.hadoop.mapred.nativetask.testutil;
 
 import java.io.IOException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.nativetask.NativeMapOutputCollectorDelegator;
 
 public class EnforceNativeOutputCollectorDelegator<K, V> extends NativeMapOutputCollectorDelegator<K, V> {
+  private static final Log LOG = LogFactory.getLog(EnforceNativeOutputCollectorDelegator.class);
   private boolean nativetaskloaded = false;
 
   @Override
@@ -32,8 +35,7 @@ public class EnforceNativeOutputCollectorDelegator<K, V> extends NativeMapOutput
       nativetaskloaded = true;
     } catch (final Exception e) {
       nativetaskloaded = false;
-      System.err.println("load nativetask lib failed, Native-Task Delegation is disabled");
-      e.printStackTrace();
+      LOG.error("load nativetask lib failed, Native-Task Delegation is disabled", e);
     }
   }
 

+ 19 - 0
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/resources/log4j.properties

@@ -0,0 +1,19 @@
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+# log4j configuration used during build and unit tests
+
+log4j.rootLogger=info,stdout
+log4j.threshhold=ALL
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2} (%F:%M(%L)) - %m%n