
HADOOP-781. Remove methods deprecated in 0.10 that are no longer widely used. (cutting)

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@496823 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting 18 years ago
parent
current commit
282f31b345

+ 6 - 0
CHANGES.txt

@@ -1,6 +1,12 @@
 Hadoop Change Log
 
 
+Trunk (unreleased changes)
+
+ 1. HADOOP-781.  Remove methods deprecated in 0.10 that are no longer
+    widely used.  (cutting)
+
+
 Release 0.10.1 - 2007-01-10
 
  1. HADOOP-857.  Fix S3 FileSystem implementation to permit its use
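The most frequent caller-side change in this commit is replacing the removed LocalFileSystem(Configuration) constructor with the FileSystem.getLocal(Configuration) factory, as the FileSystem, MapFile, and test hunks below show. A minimal caller-side sketch of the migration (the class name LocalFsExample and the printed message are illustrative only):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class LocalFsExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    // Removed by this commit (deprecated since 0.10):
    //   FileSystem fs = new LocalFileSystem(conf);

    // Supported replacement: obtain the local FileSystem via the factory method.
    FileSystem fs = FileSystem.getLocal(conf);
    System.out.println("Using: " + fs.getName());
  }
}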

+ 1 - 6
src/java/org/apache/hadoop/fs/FileSystem.java

@@ -73,7 +73,7 @@ public abstract class FileSystem extends Configured {
             fs = new DistributedFileSystem(addr, conf);
         } else if ("-local".equals(cmd)) {
             i++;
-            fs = new LocalFileSystem(conf);
+            fs = FileSystem.getLocal(conf);
         } else {
             fs = get(conf);                          // using default
             LOG.info("No FS indicated, using default:"+fs.getName());
@@ -210,11 +210,6 @@ public abstract class FileSystem extends Configured {
     // FileSystem
     ///////////////////////////////////////////////////////////////
 
-    /** @deprecated */
-    protected FileSystem(Configuration conf) {
-      super(conf);
-    }
-
     protected FileSystem() {
       super(null);
     }

+ 0 - 5
src/java/org/apache/hadoop/fs/LocalFileSystem.java

@@ -44,11 +44,6 @@ public class LocalFileSystem extends FileSystem {
     
     public LocalFileSystem() {}
 
-    /** @deprecated */
-    public LocalFileSystem(Configuration conf) throws IOException {
-      initialize(NAME, conf);
-    }
-
     /**
      * Return 1x1 'localhost' cell if the file exists.
      * Return null if otherwise.

+ 1 - 11
src/java/org/apache/hadoop/io/MapFile.java

@@ -67,16 +67,6 @@ public class MapFile {
     private WritableComparable lastKey;
 
 
-    /** Create the named map for keys of the named class.
-     * @deprecated
-     */
-    public Writer(FileSystem fs, String dirName,
-                  Class keyClass, Class valClass)
-      throws IOException {
-      this(new Configuration(), fs, dirName,
-           WritableComparator.get(keyClass), valClass);
-    }
-
     /** Create the named map for keys of the named class. */
     public Writer(Configuration conf, FileSystem fs, String dirName,
                   Class keyClass, Class valClass)
@@ -508,7 +498,7 @@ public class MapFile {
 
     Configuration conf = new Configuration();
     int ioFileBufferSize = conf.getInt("io.file.buffer.size", 4096);
-    FileSystem fs = new LocalFileSystem(conf);
+    FileSystem fs = FileSystem.getLocal(conf);
     MapFile.Reader reader = new MapFile.Reader(fs, in, conf);
     MapFile.Writer writer =
       new MapFile.Writer(conf, fs, out, reader.getKeyClass(), reader.getValueClass());

+ 4 - 10
src/java/org/apache/hadoop/io/SetFile.java

@@ -29,20 +29,14 @@ public class SetFile extends MapFile {
 
   protected SetFile() {}                            // no public ctor
 
-  /** Write a new set file. */
+  /** Write a new set file.
+   * @deprecated pass a Configuration too
+   */
   public static class Writer extends MapFile.Writer {
 
     /** Create the named set for keys of the named class. */
     public Writer(FileSystem fs, String dirName, Class keyClass) throws IOException {
-      super(fs, dirName, keyClass, NullWritable.class);
-    }
-
-    /** Create the named set using the named key comparator.
-     * @deprecated
-     */
-    public Writer(FileSystem fs, String dirName, WritableComparator comparator)
-      throws IOException {
-      super(new Configuration(), fs, dirName, comparator, NullWritable.class);
+      super(new Configuration(), fs, dirName, keyClass, NullWritable.class);
     }
 
     /** Create a set naming the element class and compression type. */
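With this change SetFile.Writer's keyClass-only constructor is tagged deprecated ("pass a Configuration too"), so callers should move to a constructor that takes a Configuration. A hedged sketch, assuming the "element class and compression type" constructor named at the end of this hunk has the shape Writer(Configuration, FileSystem, String, Class, SequenceFile.CompressionType); the class name SetFileWriteExample and the path "example.set" are illustrative:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SetFile;

public class SetFileWriteExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);

    // Deprecated by this commit (no Configuration argument):
    //   SetFile.Writer writer = new SetFile.Writer(fs, "example.set", IntWritable.class);

    // Assumed replacement: the compression-type constructor, Configuration first.
    SetFile.Writer writer =
        new SetFile.Writer(conf, fs, "example.set", IntWritable.class,
                           SequenceFile.CompressionType.NONE);
    writer.append(new IntWritable(42));
    writer.close();
  }
}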

+ 0 - 5
src/java/org/apache/hadoop/mapred/PhasedFileSystem.java

@@ -50,8 +50,6 @@ public class PhasedFileSystem extends FileSystem {
    */
   public PhasedFileSystem(FileSystem fs, String jobid, 
       String tipid, String taskid) {
-    super(fs.getConf()); // not used
-    
     this.baseFS = fs ; 
     this.jobid = jobid; 
     this.tipid = tipid ; 
@@ -66,8 +64,6 @@ public class PhasedFileSystem extends FileSystem {
    * @param conf JobConf
    */
   public PhasedFileSystem(FileSystem fs, JobConf conf) {
-    super(fs.getConf()); // not used
-    
     this.baseFS = fs ; 
     this.jobid = conf.get("mapred.job.id"); 
     this.tipid = conf.get("mapred.tip.id"); 
@@ -80,7 +76,6 @@ public class PhasedFileSystem extends FileSystem {
    * @param conf
    */
   protected PhasedFileSystem(Configuration conf){
-    super(conf);
     throw new UnsupportedOperationException("Operation not supported"); 
   }
   

+ 0 - 11
src/java/org/apache/hadoop/mapred/TaskTrackerStatus.java

@@ -94,17 +94,6 @@ class TaskTrackerStatus implements Writable {
       return failures;
     }
     
-    /**
-     * All current tasks at the TaskTracker.  
-     *
-     * Tasks are tracked by a TaskStatus object.
-     * 
-     * @deprecated use {@link #getTaskReports()} instead
-     */
-    public Iterator taskReports() {
-        return taskReports.iterator();
-    }
-
     /**
      * Get the current tasks at the TaskTracker.
      * Tasks are tracked by a {@link TaskStatus} object.
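Callers of the removed taskReports() switch to getTaskReports(), exactly as the machines.jsp hunk at the end of this commit does. A minimal sketch of the same pattern, assuming only that getTaskReports() returns a collection whose iterator() yields the tracker's current TaskStatus entries; the helper name countCurrentTasks is illustrative:

package org.apache.hadoop.mapred;   // TaskTrackerStatus is package-private

import java.util.Iterator;

class TaskReportExample {
  /** Count a tracker's current tasks via the surviving accessor. */
  static int countCurrentTasks(TaskTrackerStatus status) {
    int numCurTasks = 0;
    for (Iterator it = status.getTaskReports().iterator(); it.hasNext(); ) {
      it.next();
      numCurTasks++;
    }
    return numCurTasks;
  }
}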

+ 2 - 2
src/test/org/apache/hadoop/io/TestArrayFile.java

@@ -38,7 +38,7 @@ public class TestArrayFile extends TestCase {
 
   public void testArrayFile() throws Exception {
       Configuration conf = new Configuration();
-    FileSystem fs = new LocalFileSystem(conf);
+    FileSystem fs = FileSystem.getLocal(conf);
     RandomDatum[] data = generate(10000);
     writeTest(fs, data, FILE);
     readTest(fs, data, FILE, conf);
@@ -46,7 +46,7 @@ public class TestArrayFile extends TestCase {
 
   public void testEmptyFile() throws Exception {
     Configuration conf = new Configuration();
-    FileSystem fs = new LocalFileSystem(conf);
+    FileSystem fs = FileSystem.getLocal(conf);
     writeTest(fs, new RandomDatum[0], FILE);
     ArrayFile.Reader reader = new ArrayFile.Reader(fs, FILE, conf);
     assertNull(reader.get(0, new RandomDatum()));

+ 1 - 1
src/test/org/apache/hadoop/io/TestSequenceFile.java

@@ -77,7 +77,7 @@ public class TestSequenceFile extends TestCase {
     int seed = new Random().nextInt();
     LOG.info("Seed = " + seed);
 
-    FileSystem fs = new LocalFileSystem(conf);
+    FileSystem fs = FileSystem.getLocal(conf);
     try {
         // SequenceFile.Writer
         writeTest(fs, count, seed, file, CompressionType.NONE, null);

+ 1 - 1
src/test/org/apache/hadoop/io/TestSetFile.java

@@ -39,7 +39,7 @@ public class TestSetFile extends TestCase {
   public TestSetFile(String name) { super(name); }
 
   public void testSetFile() throws Exception {
-    FileSystem fs = new LocalFileSystem(conf);
+    FileSystem fs = FileSystem.getLocal(conf);
     try {
         RandomDatum[] data = generate(10000);
         writeTest(fs, data, FILE, CompressionType.NONE);

+ 1 - 1
src/webapps/job/machines.jsp

@@ -34,7 +34,7 @@
           sinceHeartbeat = sinceHeartbeat / 1000;
         }
         int numCurTasks = 0;
-        for (Iterator it2 = tt.taskReports(); it2.hasNext(); ) {
+        for (Iterator it2 = tt.getTaskReports().iterator(); it2.hasNext(); ) {
           it2.next();
           numCurTasks++;
         }