
HADOOP-3533. Add deprecated methods to provide API compatibility
between 0.18 and 0.17. Remove the deprecated methods in trunk.


git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/trunk@669408 13f79535-47bb-0310-9956-ffa450edef68

Owen O'Malley 17 years ago
parent
commit
232e510ca8
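
The mechanics behind the commit message, in miniature: the 0.18 release branch keeps each old 0.17 signature as a deprecated method that forwards to its replacement, while trunk deletes the deprecated form. A hedged sketch of that pattern (Foo and doWork are invented names, not from the Hadoop source):

// Hedged sketch of the pattern this commit applies; Foo and doWork are
// invented names, not taken from the Hadoop source.
public class Foo {
  /** New form of the API, present in both the 0.18 branch and trunk. */
  public void doWork(String input, boolean verbose) {
    // real work would happen here
  }

  /** Old 0.17 signature. The 0.18 branch keeps it as a deprecated
   *  forwarding method so 0.17 clients still compile; trunk (this
   *  commit's target) removes it outright. */
  @Deprecated
  public void doWork(String input) {
    doWork(input, false);
  }
}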

+ 3 - 0
CHANGES.txt

@@ -612,6 +612,9 @@ Release 0.18.0 - Unreleased
     HADOOP-3586. Provide deprecated, backwards compatibile semantics for the
     combiner to be run once and only once on each record. (cdouglas)
 
+    HADOOP-3533. Add deprecated methods to provide API compatibility
+    between 0.18 and 0.17. Remove the deprecated methods in trunk. (omalley)
+
 Release 0.17.1 - Unreleased
 
   INCOMPATIBLE CHANGES

+ 1 - 1
src/core/org/apache/hadoop/ipc/Client.java

@@ -79,7 +79,7 @@ public class Client {
   private int refCount = 1;
   
   final private static String PING_INTERVAL_NAME = "ipc.ping.interval";
-  final public static int DEFAULT_PING_INTERVAL = 60000; // 1 min
+  final static int DEFAULT_PING_INTERVAL = 60000; // 1 min
   final static int PING_CALL_ID = -1;
   
   /**
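
With DEFAULT_PING_INTERVAL narrowed to package-private, code outside org.apache.hadoop.ipc can no longer read the constant and should take the interval from configuration instead. A minimal sketch under that assumption, using the ipc.ping.interval key named in the diff above (PingIntervalExample is an invented name):

import org.apache.hadoop.conf.Configuration;

class PingIntervalExample {
  // Read the ping interval from configuration rather than from the
  // no-longer-public Client.DEFAULT_PING_INTERVAL, falling back to the
  // same 60 s default the constant encodes.
  static int pingInterval(Configuration conf) {
    return conf.getInt("ipc.ping.interval", 60000);
  }
}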

+ 1 - 1
src/core/org/apache/hadoop/metrics/util/MetricsIntValue.java

@@ -32,7 +32,7 @@ import org.apache.commons.logging.LogFactory;
  */
 public class MetricsIntValue {  
 
-  protected static final Log LOG =
+  private static final Log LOG =
     LogFactory.getLog("org.apache.hadoop.metrics.util");
 
   private String name;
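
This and the two metrics diffs that follow all make the same change: the commons-logging LOG field goes from protected to private, so subclasses and neighboring classes must declare their own logger instead of borrowing this one. The resulting idiom, shown standalone (MetricsExample is an invented name):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class MetricsExample {
  // Private per-class logger: no other class can log through this field,
  // which keeps each class's output attributed to its own category.
  private static final Log LOG =
      LogFactory.getLog("org.apache.hadoop.metrics.util");

  void record(int value) {
    LOG.info("recorded value " + value);
  }
}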

+ 1 - 1
src/core/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java

@@ -34,7 +34,7 @@ import org.apache.commons.logging.LogFactory;
  */
 public class MetricsTimeVaryingInt {
 
-  protected static final Log LOG =
+  private static final Log LOG =
     LogFactory.getLog("org.apache.hadoop.metrics.util");
   
   private String name;

+ 1 - 1
src/core/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java

@@ -34,7 +34,7 @@ import org.apache.commons.logging.LogFactory;
  */
 public class MetricsTimeVaryingRate {
 
-  protected static final Log LOG =
+  private static final Log LOG =
     LogFactory.getLog("org.apache.hadoop.metrics.util");
 
   static class Metrics {

+ 1 - 1
src/mapred/org/apache/hadoop/mapred/TaskLog.java

@@ -116,7 +116,7 @@ public class TaskLog {
     }
   }
 
-  public static class Reader extends InputStream {
+  static class Reader extends InputStream {
     private long bytesRemaining;
     private FileInputStream file;
     /**
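
Dropping public from TaskLog.Reader confines the nested class to the org.apache.hadoop.mapred package; under trunk, outside code that constructed a Reader no longer compiles. A toy illustration of the visibility rule (Outer and Helper are invented names):

package example;

public class Outer {
  // Package-private nested class: accessible from example.* only.
  static class Helper {
    int size() { return 0; }
  }
}

// From any other package, "new Outer.Helper()" is a compile error:
// Outer.Helper is not public in Outer; cannot be accessed from outside package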

+ 8 - 9
src/test/org/apache/hadoop/dfs/NNBench.java

@@ -57,7 +57,6 @@ import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reducer;
-import org.apache.hadoop.mapred.TaskTracker;
 
 /**
  * This program executes a specified operation that applies load to 
@@ -79,7 +78,7 @@ import org.apache.hadoop.mapred.TaskTracker;
  */
 
 public class NNBench {
-  protected static final Log LOG = LogFactory.getLog(
+  private static final Log LOG = LogFactory.getLog(
           "org.apache.hadoop.dfs.NNBench");
   
   protected static String CONTROL_DIR_NAME = "control";
@@ -655,7 +654,7 @@ public class NNBench {
       
       // If the sleep time is greater than 0, then sleep and return
       if (sleepTime > 0) {
-        TaskTracker.LOG.info("Waiting in barrier for: " + sleepTime + " ms");
+        LOG.info("Waiting in barrier for: " + sleepTime + " ms");
       
         try {
           Thread.sleep(sleepTime);
@@ -773,7 +772,7 @@ public class NNBench {
 
             reporter.setStatus("Finish "+ l + " files");
           } catch (IOException e) {
-            TaskTracker.LOG.info("Exception recorded in op: " +
+            LOG.info("Exception recorded in op: " +
                     "Create/Write/Close");
 
             numOfExceptions++;
@@ -816,7 +815,7 @@ public class NNBench {
 
             reporter.setStatus("Finish "+ l + " files");
           } catch (IOException e) {
-            TaskTracker.LOG.info("Exception recorded in op: OpenRead " + e);
+            LOG.info("Exception recorded in op: OpenRead " + e);
             numOfExceptions++;
           }
         }
@@ -848,7 +847,7 @@ public class NNBench {
 
             reporter.setStatus("Finish "+ l + " files");
           } catch (IOException e) {
-            TaskTracker.LOG.info("Exception recorded in op: Rename");
+            LOG.info("Exception recorded in op: Rename");
 
             numOfExceptions++;
           }
@@ -879,7 +878,7 @@ public class NNBench {
 
             reporter.setStatus("Finish "+ l + " files");
           } catch (IOException e) {
-            TaskTracker.LOG.info("Exception in recorded op: Delete");
+            LOG.info("Exception in recorded op: Delete");
 
             numOfExceptions++;
           }
@@ -897,13 +896,13 @@ public class NNBench {
     protected String hostName;
 
     public NNBenchReducer () {
-      TaskTracker.LOG.info("Starting NNBenchReducer !!!");
+      LOG.info("Starting NNBenchReducer !!!");
       try {
         hostName = java.net.InetAddress.getLocalHost().getHostName();
       } catch(Exception e) {
         hostName = "localhost";
       }
-      TaskTracker.LOG.info("Starting NNBenchReducer on " + hostName);
+      LOG.info("Starting NNBenchReducer on " + hostName);
     }
 
     /**
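
Every NNBench change above is the same substitution: log calls routed through TaskTracker.LOG now go through the class's own private LOG, and the TaskTracker import disappears. The before-and-after at a call site, condensed (the barrier-style sleep is taken from the diff; NNBenchSketch is an invented name):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class NNBenchSketch {
  // Own category instead of piggybacking on TaskTracker's logger.
  private static final Log LOG =
      LogFactory.getLog("org.apache.hadoop.dfs.NNBench");

  void barrier(long sleepTime) throws InterruptedException {
    if (sleepTime > 0) {
      // Was: TaskTracker.LOG.info(...) -- which coupled the benchmark
      // to an unrelated daemon class purely for its logger.
      LOG.info("Waiting in barrier for: " + sleepTime + " ms");
      Thread.sleep(sleepTime);
    }
  }
}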

+ 5 - 3
src/test/org/apache/hadoop/fs/AccumulatingReducer.java

@@ -20,13 +20,14 @@ package org.apache.hadoop.fs;
 import java.io.IOException;
 import java.util.Iterator;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.UTF8;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.MapReduceBase;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.TaskTracker;
 
 /**
  * Reducer that accumulates values based on their type.
@@ -46,17 +47,18 @@ import org.apache.hadoop.mapred.TaskTracker;
  */
 public class AccumulatingReducer extends MapReduceBase
     implements Reducer<UTF8, UTF8, UTF8, UTF8> {
+  private static final Log LOG = LogFactory.getLog(AccumulatingReducer.class);
   
   protected String hostName;
   
   public AccumulatingReducer () {
-    TaskTracker.LOG.info("Starting AccumulatingReducer !!!");
+    LOG.info("Starting AccumulatingReducer !!!");
     try {
       hostName = java.net.InetAddress.getLocalHost().getHostName();
     } catch(Exception e) {
       hostName = "localhost";
     }
-    TaskTracker.LOG.info("Starting AccumulatingReducer on " + hostName);
+    LOG.info("Starting AccumulatingReducer on " + hostName);
   }
   
   public void reduce(UTF8 key, 
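
Note the two styles of obtaining a commons-logging logger in this commit: the metrics classes pass an explicit category string, while AccumulatingReducer passes the class object, which derives the category from the class's fully qualified name. Both are standard; a side-by-side sketch (ExampleReducer is an invented name):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class ExampleReducer {
  // Category derived from the class: a renamed or moved class keeps
  // its logger name in sync automatically.
  private static final Log CLASS_LOG =
      LogFactory.getLog(ExampleReducer.class);

  // Explicit category string: several related classes can share one
  // category, as the metrics utilities above do.
  private static final Log NAMED_LOG =
      LogFactory.getLog("org.apache.hadoop.metrics.util");
}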