Procházet zdrojové kódy

HADOOP-652. In DFS, when a file is deleted, the block count is now decremented. Contributed by Vladimir Krokhmalyov.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@477392 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting před 18 lety
rodič
revize
e5f08bc551
změnil 2 soubory, kde provedl 27 přidání a 7 odebrání
  1. 3 0
      CHANGES.txt
  2. 24 7
      src/java/org/apache/hadoop/dfs/FSDataset.java

+ 3 - 0
CHANGES.txt

@@ -88,6 +88,9 @@ Trunk (unreleased changes)
 26. HADOOP-695.  Fix a NullPointerException in contrib/streaming.
     (Hairong Kuang via cutting)
 
+27. HADOOP-652.  In DFS, when a file is deleted, the block count is
+    now decremented.  (Vladimir Krokhmalyov via cutting)
+
 
 Release 0.8.0 - 2006-11-03
 

+ 24 - 7
src/java/org/apache/hadoop/dfs/FSDataset.java

@@ -170,6 +170,16 @@ class FSDataset implements FSConstants {
             }
         }
         
+        void clearPath(File f) {
+          if (dir.compareTo(f) == 0) numBlocks--;
+          else {
+            if ((siblings != null) && (myIdx != (siblings.length - 1)))
+              siblings[myIdx + 1].clearPath(f);
+            else if (children != null)
+              children[0].clearPath(f);
+          }
+        }
+        
         public String toString() {
           return "FSDir{" +
               "dir=" + dir +
@@ -261,6 +271,10 @@ class FSDataset implements FSConstants {
         dataDir.getBlockMap(blockMap);
       }
       
+      void clearPath(File f) {
+        dataDir.clearPath(f);
+      }
+      
       public String toString() {
         return dir.getAbsolutePath();
       }
@@ -498,15 +512,18 @@ class FSDataset implements FSConstants {
      */
     public void invalidate(Block invalidBlks[]) throws IOException {
       for (int i = 0; i < invalidBlks.length; i++) {
-        synchronized ( this ) {
-          File f = getFile(invalidBlks[i]);
-          if (!f.delete()) {
-            throw new IOException("Unexpected error trying to delete block "
-                                  + invalidBlks[i] + " at file " + f);
-          }
+        File f;
+        synchronized (this) {
+          f = getFile(invalidBlks[i]);
+          FSVolume v = volumeMap.get(invalidBlks[i]);
+          v.clearPath(f.getParentFile());
           blockMap.remove(invalidBlks[i]);
           volumeMap.remove(invalidBlks[i]);
-        } 
+        }
+        if (!f.delete()) {
+            throw new IOException("Unexpected error trying to delete block "
+                                  + invalidBlks[i] + " at file " + f);
+        }
         DataNode.LOG.info("Deleting block " + invalidBlks[i]);
       }
     }