Ver código fonte

HADOOP-2928. Remove deprecated FileSystem.getContentLength(). (Lohit Vjayarenu via rangadi)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/trunk@648357 13f79535-47bb-0310-9956-ffa450edef68
Raghu Angadi 17 anos atrás
pai
commit
da2b582a0b

+ 3 - 0
CHANGES.txt

@@ -14,6 +14,9 @@ Trunk (unreleased changes)
     accessible via a NFS mount. (shv)
 
   IMPROVEMENTS
+   
+    HADOOP-2928. Remove deprecated FileSystem.getContentLength().
+    (Lohit Vjayarenu via rangadi)
 
   OPTIMIZATIONS
 

+ 1 - 6
src/java/org/apache/hadoop/dfs/ChecksumDistributedFileSystem.java

@@ -49,11 +49,6 @@ public class ChecksumDistributedFileSystem extends ChecksumFileSystem {
     return (DistributedFileSystem)fs;
   }
 
-  @Override
-  public long getContentLength(Path f) throws IOException {
-    return fs.getContentLength(f);
-  }
-
   /** Return the total raw capacity of the filesystem, disregarding
    * replication .*/
   public long getRawCapacity() throws IOException{
@@ -128,4 +123,4 @@ public class ChecksumDistributedFileSystem extends ChecksumFileSystem {
     return getDFS().getFileStatus(f);
   }
 
-}
+}

+ 0 - 15
src/java/org/apache/hadoop/dfs/DistributedFileSystem.java

@@ -170,21 +170,6 @@ public class DistributedFileSystem extends FileSystem {
    return dfs.delete(getPathName(f), recursive);
   }
   
-  /** {@inheritDoc} */
-  @Deprecated
-  public long getContentLength(Path f) throws IOException {
-    // If it is a directory, then issue a getContentLength
-    // RPC to find the size of the entire subtree in one call.
-    //
-    if (f instanceof DfsPath) {
-      DfsPath dfspath = (DfsPath)f;
-      if (!dfspath.isDirectory()) {
-        return dfspath.getContentsLength();
-      }
-    }
-    return getContentSummary(f).getLength();
-  }
-
   /** {@inheritDoc} */
   public ContentSummary getContentSummary(Path f) throws IOException {
     return dfs.getContentSummary(getPathName(f));

+ 1 - 1
src/java/org/apache/hadoop/fs/ChecksumFileSystem.java

@@ -215,7 +215,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
     /* Return the file length */
     private long getFileLength() throws IOException {
       if( fileLen==-1L ) {
-        fileLen = fs.getContentLength(file);
+        fileLen = fs.getContentSummary(file).getLength();
       }
       return fileLen;
     }

+ 0 - 10
src/java/org/apache/hadoop/fs/FileSystem.java

@@ -628,16 +628,6 @@ public abstract class FileSystem extends Configured implements Closeable {
     return getFileStatus(f).getLen();
   }
     
-  /** Return the number of bytes of the given path 
-   * If <i>f</i> is a file, return the size of the file;
-   * If <i>f</i> is a directory, return the size of the directory tree
-   * @deprecated Use {@link #getContentSummary(Path)}.
-   */
-  @Deprecated
-  public long getContentLength(Path f) throws IOException {
-    return getContentSummary(f).getLength();
-  }
-
   /** Return the {@link ContentSummary} of a given {@link Path}. */
   public ContentSummary getContentSummary(Path f) throws IOException {
     FileStatus status = getFileStatus(f);

+ 3 - 11
src/java/org/apache/hadoop/fs/FsShell.java

@@ -694,17 +694,9 @@ public class FsShell extends Configured implements Tool {
           ": No such file or directory.");
     }
     for(int i=0; i<status.length; i++) {
-      FileStatus items[] = srcFs.listStatus(status[i].getPath());
-      if (items != null) {
-        long totalSize=0;
-        for(int j=0; j<items.length; j++) {
-          totalSize += srcFs.getContentLength(
-              items[j].getPath());
-        }
-        String pathStr = status[i].getPath().toString();
-        System.out.println(
-                           ("".equals(pathStr)?".":pathStr) + "\t" + totalSize);
-      }
+      long totalSize = srcFs.getContentSummary(status[i].getPath()).getLength();
+      String pathStr = status[i].getPath().toString();
+      System.out.println(("".equals(pathStr)?".":pathStr) + "\t" + totalSize);
     }
   }
 

+ 0 - 23
src/java/org/apache/hadoop/fs/kfs/KosmosFileSystem.java

@@ -127,29 +127,6 @@ public class KosmosFileSystem extends FileSystem {
         return kfsImpl.isFile(srep);
     }
 
-    public long getContentLength(Path path)  throws IOException {
-	Path absolute = makeAbsolute(path);
-        String srep = absolute.toUri().getPath();
-
-	if (kfsImpl.isFile(srep))
-	    return kfsImpl.filesize(srep);
-        
-	String[] entries = kfsImpl.readdir(srep);
-
-        if (entries == null)
-            return 0;
-
-        // kfsreaddir() returns "." and ".."; strip them before
-        // passing back to hadoop fs.
-	long numEntries = 0;
-	for (int i = 0; i < entries.length; i++) {
-	    if ((entries[i].compareTo(".") == 0) || (entries[i].compareTo("..") == 0))
-		continue;
-	    numEntries++;
-	}
-        return numEntries;
-    }
-
     public FileStatus[] listStatus(Path path) throws IOException {
 	Path absolute = makeAbsolute(path);
         String srep = absolute.toUri().getPath();

+ 1 - 1
src/java/org/apache/hadoop/mapred/MultiFileInputFormat.java

@@ -49,7 +49,7 @@ public abstract class MultiFileInputFormat<K, V>
       long totLength = 0;
       for(int i=0; i<paths.length; i++) {
         FileSystem fs = paths[i].getFileSystem(job);
-        lengths[i] = fs.getContentLength(paths[i]);
+        lengths[i] = fs.getContentSummary(paths[i]).getLength();
         totLength += lengths[i];
       }
       double avgLengthPerSplit = ((double)totLength) / numSplits;

+ 5 - 5
src/test/org/apache/hadoop/dfs/TestDFSPermission.java

@@ -657,7 +657,7 @@ public class TestDFSPermission extends TestCase {
   }
 
   /* A class that verifies the permission checking is correct for isDirectory,
-   * exist,  getFileInfo, getContentLength */
+   * exist,  getFileInfo, getContentSummary */
   private class StatsPermissionVerifier extends PermissionVerifier {
     OpType opType;
 
@@ -668,7 +668,7 @@ public class TestDFSPermission extends TestCase {
       setOpType(opType);
     }
 
-    /* set if operation is getFileInfo, isDirectory, exist, getContenLength */
+    /* set if operation is getFileInfo, isDirectory, exist, getContentSummary */
     void setOpType(OpType opType) {
       this.opType = opType;
     }
@@ -691,7 +691,7 @@ public class TestDFSPermission extends TestCase {
         fs.exists(path);
         break;
       case GET_CONTENT_LENGTH:
-        fs.getContentLength(path);
+        fs.getContentSummary(path).getLength();
         break;
       default:
         throw new IllegalArgumentException("Unexpected operation type: "
@@ -702,7 +702,7 @@ public class TestDFSPermission extends TestCase {
 
   private StatsPermissionVerifier statsVerifier = new StatsPermissionVerifier();
   /* test if the permission checking of isDirectory, exist,
-   * getFileInfo, getContentLength is correct */
+   * getFileInfo, getContentSummary is correct */
   private void testStats(UnixUserGroupInformation ugi, Path path,
       short ancestorPermission, short parentPermission) throws Exception {
     statsVerifier.set(path, OpType.GET_FILEINFO, ancestorPermission,
@@ -933,7 +933,7 @@ public class TestDFSPermission extends TestCase {
       checkNoPermissionDeny(e);
     }
     try {      
-      fs.getContentLength(NON_EXISTENT_FILE);
+      fs.getContentSummary(NON_EXISTENT_FILE).getLength();
     } catch (IOException e) {
       checkNoPermissionDeny(e);
     }

+ 2 - 3
src/test/org/apache/hadoop/dfs/TestFileStatus.java

@@ -111,7 +111,6 @@ public class TestFileStatus extends TestCase {
       assertTrue(fs.getFileStatus(file1).getBlockSize() == blockSize);
       assertTrue(fs.getFileStatus(file1).getReplication() == 1);
       assertTrue(fs.getFileStatus(file1).getLen() == fileSize);
-      assertTrue(fs.getContentLength(file1) == fileSize);
       System.out.println("Path : \"" + file1 + "\"");
 
       // create an empty directory
@@ -123,7 +122,7 @@ public class TestFileStatus extends TestCase {
       assertTrue(dir + " should be a directory", 
                  fs.getFileStatus(path).isDir() == true);
       assertTrue(dir + " should be zero size ",
-                 fs.getContentLength(dir) == 0);
+                 fs.getContentSummary(dir).getLength() == 0);
       assertTrue(dir + " should be zero size ",
                  fs.getFileStatus(dir).getLen() == 0);
       System.out.println("Dir : \"" + dir + "\"");
@@ -151,7 +150,7 @@ public class TestFileStatus extends TestCase {
       // verify that the size of the directory increased by the size 
       // of the two files
       assertTrue(dir + " size should be " + (blockSize/2), 
-                 blockSize/2 == fs.getContentLength(dir));
+                 blockSize/2 == fs.getContentSummary(dir).getLength());
     } finally {
       fs.close();
       cluster.shutdown();