浏览代码

HADOOP-3265. Removed deprecated API getFileCacheHints().
(Lohit Vijayarenu via rangadi)


git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/trunk@659405 13f79535-47bb-0310-9956-ffa450edef68

Raghu Angadi 17 年之前
父节点
当前提交
afbb689f37
共有 3 个文件被更改,包括 7 次插入31 次删除
  1. 3 0
      CHANGES.txt
  2. 0 29
      src/java/org/apache/hadoop/fs/FileSystem.java
  3. 4 2
      src/test/org/apache/hadoop/dfs/TestFileAppend.java

+ 3 - 0
CHANGES.txt

@@ -58,6 +58,9 @@ Trunk (unreleased changes)
     MapTaskStatus, ReduceTaskStatus, JobSubmissionProtocol, 
     CompletedJobStatusStore. (enis via omaley)
 
+    HADOOP-3265. Removed deprecated API getFileCacheHints().
+    (Lohit Vijayarenu via rangadi)
+
   NEW FEATURES
 
     HADOOP-3074. Provides a UrlStreamHandler for DFS and other FS,

+ 0 - 29
src/java/org/apache/hadoop/fs/FileSystem.java

@@ -317,35 +317,6 @@ public abstract class FileSystem extends Configured implements Closeable {
     }
   }
 
-  /**
-   * @deprecated Use getFileBlockLocations() instead
-   *
-   * Return a 2D array of size 1x1 or greater, containing hostnames 
-   * where portions of the given file can be found.  For a nonexistent 
-   * file or regions, null will be returned.
-   *
-   * This call is most helpful with DFS, where it returns 
-   * hostnames of machines that contain the given file.
-   *
-   * The FileSystem will simply return an elt containing 'localhost'.
-   */
-  @Deprecated
-  public String[][] getFileCacheHints(Path f, long start, long len)
-      throws IOException {
-    BlockLocation[] blkLocations = getFileBlockLocations(f, start, len);
-    if ((blkLocations == null) || (blkLocations.length == 0)) {
-      return new String[0][];
-    }
-    int blkCount = blkLocations.length;
-    String[][] hints = new String[blkCount][];
-    for (int i=0; i < blkCount; i++) {
-      String[] hosts = blkLocations[i].getHosts();
-      hints[i] = new String[hosts.length];
-      hints[i] = hosts;
-    }
-    return hints;
-  }
-
   /**
    * Return an array containing hostnames, offset and size of 
    * portions of the given file.  For a nonexistent 

+ 4 - 2
src/test/org/apache/hadoop/dfs/TestFileAppend.java

@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileUtil.HardLink;
+import org.apache.hadoop.fs.BlockLocation;
 
 /**
  * This class tests the building blocks that are needed to
@@ -91,14 +92,15 @@ public class TestFileAppend extends TestCase {
         Thread.sleep(1000);
       } catch (InterruptedException e) {}
       done = true;
-      String[][] locations = fileSys.getFileCacheHints(name, 0, fileSize);
+      BlockLocation[] locations = fileSys.getFileBlockLocations(name, 0, 
+                                                                fileSize);
       if (locations.length < numBlocks) {
         System.out.println("Number of blocks found " + locations.length);
         done = false;
         continue;
       }
       for (int idx = 0; idx < numBlocks; idx++) {
-        if (locations[idx].length < repl) {
+        if (locations[idx].getHosts().length < repl) {
           System.out.println("Block index " + idx + " not yet replciated.");
           done = false;
           break;