
HADOOP-880. Fix S3 FileSystem to remove directories. Contributed by Tom White.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@494989 13f79535-47bb-0310-9956-ffa450edef68
Author: Doug Cutting
Parent commit: 554cc853a2

CHANGES.txt (+3, -0)

@@ -36,6 +36,9 @@ Trunk (unreleased changes)
     than the default, zlib-based compression, but it is only available
     when the native library is built.  (Arun C Murthy via cutting)
 
+12. HADOOP-880.  Fix S3 FileSystem to remove directories.
+    (Tom White via cutting)
+
 
 Release 0.10.0 - 2007-01-05
 

src/java/org/apache/hadoop/fs/s3/S3FileSystem.java (+12, -2)

@@ -188,17 +188,27 @@ public class S3FileSystem extends FileSystem {
 
   @Override
   public boolean deleteRaw(Path path) throws IOException {
-    // TODO: Check if path is directory with children
     Path absolutePath = makeAbsolute(path);
     INode inode = store.getINode(absolutePath);
     if (inode == null) {
       return false;
     }
-    store.deleteINode(absolutePath);
     if (inode.isFile()) {
+      store.deleteINode(absolutePath);
       for (Block block : inode.getBlocks()) {
         store.deleteBlock(block);
       }
+    } else {
+      Path[] contents = listPathsRaw(absolutePath);
+      if (contents == null) {
+        return false;
+      }
+      for (Path p : contents) {
+        if (! deleteRaw(p)) {
+          return false;
+        }
+      }
+      store.deleteINode(absolutePath);
     }
     return true;
   }
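
For readers skimming the hunk above, here is a minimal, hypothetical sketch of the semantics the new deleteRaw gives callers, written against the same public FileSystem methods the tests below use; the RecursiveDeleteSketch class and deleteTree helper are illustrative only and are not part of this commit.

```java
import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Illustrative helper only (not part of HADOOP-880): shows what callers can
// now expect when deleting a directory tree on the S3 FileSystem.
public class RecursiveDeleteSketch {

  // 'fs' is assumed to be an already-configured S3FileSystem instance,
  // obtained the same way S3FileSystemBaseTest sets one up.
  public static boolean deleteTree(FileSystem fs, Path dir) throws IOException {
    // With this fix, delete() on a directory lists its children, deletes
    // each file and subdirectory recursively, and only then removes the
    // directory's own INode. If any child cannot be listed or deleted,
    // it returns false and the directory entry is left in place.
    return fs.delete(dir);
  }
}
```

As in the diff above, a null result from listPathsRaw aborts the delete rather than (as before the fix) removing the directory INode while leaving its children orphaned.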

src/test/org/apache/hadoop/fs/s3/S3FileSystemBaseTest.java (+37, -11)

@@ -166,9 +166,7 @@ public abstract class S3FileSystemBaseTest extends TestCase {
     
     s3FileSystem.mkdirs(path.getParent());
 
-    FSOutputStream out = s3FileSystem.createRaw(path, false, (short) 1, BLOCK_SIZE);
-    out.write(data, 0, BLOCK_SIZE);
-    out.close();
+    createEmptyFile(path);
     
     assertTrue("Exists", s3FileSystem.exists(path));
     assertEquals("Length", BLOCK_SIZE, s3FileSystem.getLength(path));
@@ -180,7 +178,7 @@ public abstract class S3FileSystemBaseTest extends TestCase {
       // Expected
     }
     
-    out = s3FileSystem.createRaw(path, true, (short) 1, BLOCK_SIZE);
+    FSOutputStream out = s3FileSystem.createRaw(path, true, (short) 1, BLOCK_SIZE);
     out.write(data, 0, BLOCK_SIZE / 2);
     out.close();
     
@@ -191,21 +189,46 @@ public abstract class S3FileSystemBaseTest extends TestCase {
 
   public void testWriteInNonExistentDirectory() throws IOException {
     Path path = new Path("/test/hadoop/file");    
-    FSOutputStream out = s3FileSystem.createRaw(path, false, (short) 1, BLOCK_SIZE);
-    out.write(data, 0, BLOCK_SIZE);
-    out.close();
+    createEmptyFile(path);
     
     assertTrue("Exists", s3FileSystem.exists(path));
     assertEquals("Length", BLOCK_SIZE, s3FileSystem.getLength(path));
     assertTrue("Parent exists", s3FileSystem.exists(path.getParent()));
   }
-  
+
   public void testDeleteNonExistentFile() throws IOException {
     Path path = new Path("/test/hadoop/file");    
     assertFalse("Doesn't exist", s3FileSystem.exists(path));
     assertFalse("No deletion", s3FileSystem.delete(path));
   }
 
+  public void testDeleteDirectory() throws IOException {
+    Path subdir = new Path("/test/hadoop");
+    Path dir = subdir.getParent();
+    Path root = dir.getParent();
+    s3FileSystem.mkdirs(subdir);
+    Path file1 = new Path(dir, "file1");
+    Path file2 = new Path(subdir, "file2");
+    
+    createEmptyFile(file1);
+    createEmptyFile(file2);
+    
+    assertTrue("root exists", s3FileSystem.exists(root));
+    assertTrue("dir exists", s3FileSystem.exists(dir));
+    assertTrue("file1 exists", s3FileSystem.exists(file1));
+    assertTrue("subdir exists", s3FileSystem.exists(subdir));
+    assertTrue("file2 exists", s3FileSystem.exists(file2));
+    
+    assertTrue("Delete", s3FileSystem.delete(dir));
+
+    assertTrue("root exists", s3FileSystem.exists(root));
+    assertFalse("dir exists", s3FileSystem.exists(dir));
+    assertFalse("file1 exists", s3FileSystem.exists(file1));
+    assertFalse("subdir exists", s3FileSystem.exists(subdir));
+    assertFalse("file2 exists", s3FileSystem.exists(file2));
+    
+  }
+
   public void testRename() throws Exception {
     int len = BLOCK_SIZE;
     
@@ -213,9 +236,7 @@ public abstract class S3FileSystemBaseTest extends TestCase {
     
     s3FileSystem.mkdirs(path.getParent());
 
-    FSOutputStream out = s3FileSystem.createRaw(path, false, (short) 1, BLOCK_SIZE);
-    out.write(data, 0, len);
-    out.close();
+    createEmptyFile(path);
 
     assertTrue("Exists", s3FileSystem.exists(path));
 
@@ -235,5 +256,10 @@ public abstract class S3FileSystemBaseTest extends TestCase {
     }
   }
 
+  private void createEmptyFile(Path path) throws IOException {
+    FSOutputStream out = s3FileSystem.createRaw(path, false, (short) 1, BLOCK_SIZE);
+    out.write(data, 0, BLOCK_SIZE);
+    out.close();
+  }
 
 }