Explorar el Código

HDFS-995. Replace usage of FileStatus#isDir(). Contributed by Eli Collins.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hdfs/trunk@949827 13f79535-47bb-0310-9956-ffa450edef68
Thomas White hace 15 años
padre
commit
6899fc0137

+ 3 - 0
CHANGES.txt

@@ -553,6 +553,9 @@ Release 0.21.0 - Unreleased
     HDFS-1126. Change HDFS to depend on Hadoop 'common' artifacts instead
     of 'core'. (tomwhite)
 
+    HDFS-995.  Replace usage of FileStatus#isDir().  (Eli Collins via
+    tomwhite)
+
   OPTIMIZATIONS
 
     HDFS-946. NameNode should not return full path name when lisitng a

+ 2 - 2
src/contrib/thriftfs/src/java/org/apache/hadoop/thriftfs/HadoopThriftServer.java

@@ -415,7 +415,7 @@ public class HadoopThriftServer extends ThriftHadoopFileSystem {
         return new org.apache.hadoop.thriftfs.api.FileStatus(
           stat.getPath().toString(),
           stat.getLen(),
-          stat.isDir(),
+          stat.isDirectory(),
           stat.getReplication(),
           stat.getBlockSize(),
           stat.getModificationTime(),
@@ -448,7 +448,7 @@ public class HadoopThriftServer extends ThriftHadoopFileSystem {
           tmp = new org.apache.hadoop.thriftfs.api.FileStatus(
                       stat[i].getPath().toString(),
                       stat[i].getLen(),
-                      stat[i].isDir(),
+                      stat[i].isDirectory(),
                       stat[i].getReplication(),
                       stat[i].getBlockSize(),
                       stat[i].getModificationTime(),

+ 1 - 1
src/java/org/apache/hadoop/hdfs/HftpFileSystem.java

@@ -220,7 +220,7 @@ public class HftpFileSystem extends FileSystem {
 
 
     public FileStatus[] listStatus(Path f, boolean recur) throws IOException {
       fetchList(f.toUri().getPath(), recur);
-      if (fslist.size() > 0 && (fslist.size() != 1 || fslist.get(0).isDir())) {
+      if (fslist.size() > 0 && (fslist.size() != 1 || fslist.get(0).isDirectory())) {
         fslist.remove(0);
       }
       return fslist.toArray(new FileStatus[0]);

+ 3 - 2
src/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java

@@ -605,9 +605,10 @@ class FSDirectory implements Closeable {
         throw new IOException(error);
       }
       if (dstInode != null) { // Destination exists
+        // It's OK to rename a file to a symlink and vice versa
         if (dstInode.isDirectory() != srcInode.isDirectory()) {
-          error = "Source " + src + " Destination " + dst
-              + " both should be either file or directory";
+          error = "Source " + src + " and destination " + dst
+              + " must both be directories";
           NameNode.stateChangeLog.warn("DIR* FSDirectory.unprotectedRenameTo: "
               + error);
           throw new IOException(error);

+ 1 - 1
src/test/hdfs/org/apache/hadoop/hdfs/TestDFSClientRetries.java

@@ -400,7 +400,7 @@ public class TestDFSClientRetries extends TestCase {
       
       
       // verify that file exists in FS namespace
       assertTrue(file1 + " should be a file", 
-                  fs.getFileStatus(file1).isDir() == false);
+                  fs.getFileStatus(file1).isFile());
       System.out.println("Path : \"" + file1 + "\"");
       LOG.info("Path : \"" + file1 + "\"");
 

+ 1 - 1
src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java

@@ -73,7 +73,7 @@ public class TestDFSShell extends TestCase {
   static Path mkdir(FileSystem fs, Path p) throws IOException {
     assertTrue(fs.mkdirs(p));
     assertTrue(fs.exists(p));
-    assertTrue(fs.getFileStatus(p).isDir());
+    assertTrue(fs.getFileStatus(p).isDirectory());
     return p;
   }
 

+ 1 - 1
src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java

@@ -134,7 +134,7 @@ public class TestDFSUpgradeFromImage extends TestCase {
     TreeMap<Path, Boolean> fileMap = new TreeMap<Path, Boolean>();
     
     for(FileStatus file : fileArr) {
-      fileMap.put(file.getPath(), Boolean.valueOf(file.isDir()));
+      fileMap.put(file.getPath(), Boolean.valueOf(file.isDirectory()));
     }
     
     for(Iterator<Path> it = fileMap.keySet().iterator(); it.hasNext();) {

+ 4 - 4
src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreation.java

@@ -170,9 +170,9 @@ public class TestFileCreation extends junit.framework.TestCase {
       //
       Path path = new Path("/");
       System.out.println("Path : \"" + path.toString() + "\"");
-      System.out.println(fs.getFileStatus(path).isDir()); 
+      System.out.println(fs.getFileStatus(path).isDirectory()); 
       assertTrue("/ should be a directory", 
-                 fs.getFileStatus(path).isDir() == true);
+                 fs.getFileStatus(path).isDirectory());
 
       //
       // Create a directory inside /, then try to overwrite it
@@ -201,7 +201,7 @@ public class TestFileCreation extends junit.framework.TestCase {
 
 
       // verify that file exists in FS namespace
       assertTrue(file1 + " should be a file", 
-                  fs.getFileStatus(file1).isDir() == false);
+                 fs.getFileStatus(file1).isFile());
       System.out.println("Path : \"" + file1 + "\"");
 
       // write to file
@@ -321,7 +321,7 @@ public class TestFileCreation extends junit.framework.TestCase {
 
 
       // verify that file exists in FS namespace
       assertTrue(file1 + " should be a file", 
-                  fs.getFileStatus(file1).isDir() == false);
+                 fs.getFileStatus(file1).isFile());
       System.out.println("Path : \"" + file1 + "\"");
 
       // kill the datanode

+ 5 - 5
src/test/hdfs/org/apache/hadoop/hdfs/TestFileStatus.java

@@ -104,7 +104,7 @@ public class TestFileStatus {
     // Check that / exists
     Path path = new Path("/");
     assertTrue("/ should be a directory", 
-               fs.getFileStatus(path).isDir());
+               fs.getFileStatus(path).isDirectory());
     
     // Make sure getFileInfo returns null for files which do not exist
     HdfsFileStatus fileInfo = dfsClient.getFileInfo("/noSuchFile");
@@ -127,7 +127,7 @@ public class TestFileStatus {
     checkFile(fs, file1, 1);
     // test getFileStatus on a file
     FileStatus status = fs.getFileStatus(file1);
-    assertFalse(file1 + " should be a file", status.isDir());
+    assertFalse(file1 + " should be a file", status.isDirectory());
     assertEquals(blockSize, status.getBlockSize());
     assertEquals(1, status.getReplication());
     assertEquals(fileSize, status.getLen());
@@ -142,7 +142,7 @@ public class TestFileStatus {
     FileStatus[] stats = fs.listStatus(file1);
     assertEquals(1, stats.length);
     FileStatus status = stats[0];
-    assertFalse(file1 + " should be a file", status.isDir());
+    assertFalse(file1 + " should be a file", status.isDirectory());
     assertEquals(blockSize, status.getBlockSize());
     assertEquals(1, status.getReplication());
     assertEquals(fileSize, status.getLen());
@@ -153,7 +153,7 @@ public class TestFileStatus {
     Iterator<FileStatus> itor = fc.listStatus(file1);
     status = itor.next();
     assertEquals(stats[0], status);
-    assertFalse(file1 + " should be a file", status.isDir()); 
+    assertFalse(file1 + " should be a file", status.isDirectory());
   }
 
   /** Test getting a FileStatus object using a non-existant path */
@@ -192,7 +192,7 @@ public class TestFileStatus {
     
     
     // test getFileStatus on an empty directory
     FileStatus status = fs.getFileStatus(dir);
-    assertTrue(dir + " should be a directory", status.isDir());
+    assertTrue(dir + " should be a directory", status.isDirectory());
     assertTrue(dir + " should be zero size ", status.getLen() == 0);
     assertEquals(dir.makeQualified(fs.getUri(), 
         fs.getWorkingDirectory()).toString(), 

+ 2 - 1
src/test/hdfs/org/apache/hadoop/hdfs/TestListPathServlet.java

@@ -114,7 +114,8 @@ public class TestListPathServlet {
     }
     for (FileStatus status : statuslist) {
       System.out.println("status:" + status.getPath().toString() + " type "
-          + (status.isDir() ? "directory" : "file"));
+          + (status.isDirectory() ? "directory" 
+                                  : ( status.isFile() ? "file" : "symlink")));
     }
     for (String file : filelist) {
       boolean found = false;

+ 1 - 1
src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestFileLimit.java

@@ -95,7 +95,7 @@ public class TestFileLimit extends TestCase {
       //
       Path path = new Path("/");
       assertTrue("/ should be a directory", 
-                 fs.getFileStatus(path).isDir() == true);
+                 fs.getFileStatus(path).isDirectory());
       currentNodes = 1;          // root inode
 
       // verify that we can create the specified number of files. We leave

+ 1 - 1
src/test/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java

@@ -259,7 +259,7 @@ public class TestOfflineImageViewer extends TestCase {
   // the output of the ls file from the image processor
   private void compareFiles(FileStatus fs, LsElements elements) {
     assertEquals("directory listed as such",  
-                                        fs.isDir() ? 'd' : '-', elements.dir);
+                 fs.isDirectory() ? 'd' : '-', elements.dir);
     assertEquals("perms string equal", 
                                 fs.getPermission().toString(), elements.perms);
     assertEquals("replication equal", fs.getReplication(), elements.replication);

+ 1 - 1
src/test/hdfs/org/apache/hadoop/security/TestPermission.java

@@ -52,7 +52,7 @@ public class TestPermission extends TestCase {
   static FsPermission checkPermission(FileSystem fs,
       String path, FsPermission expected) throws IOException {
     FileStatus s = fs.getFileStatus(new Path(path));
-    LOG.info(s.getPath() + ": " + s.isDir() + " " + s.getPermission()
+    LOG.info(s.getPath() + ": " + s.isDirectory() + " " + s.getPermission()
         + ":" + s.getOwner() + ":" + s.getGroup());
     if (expected != null) {
       assertEquals(expected, s.getPermission());

+ 1 - 1
src/test/unit/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java

@@ -43,7 +43,7 @@ public class TestDataDirs {
     FsPermission badPerm = new FsPermission("000");
     FileStatus stat = make(stub(FileStatus.class)
         .returning(normalPerm, normalPerm, badPerm).from.getPermission());
-    when(stat.isDir()).thenReturn(true);
+    when(stat.isDirectory()).thenReturn(true);
     LocalFileSystem fs = make(stub(LocalFileSystem.class)
         .returning(stat).from.getFileStatus(any(Path.class)));
     when(fs.pathToFile(any(Path.class))).thenReturn(localDir);