1
0
فهرست منبع

HADOOP-3381. Clear referenced when directories are deleted so that
effect of memory leaks are not multiplied. (rangadi)


git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/trunk@658862 13f79535-47bb-0310-9956-ffa450edef68

Raghu Angadi 17 سال پیش
والد
کامیت
cfc27bdc2c
3 فایل‌های تغییر یافته به همراه 21 افزوده شده و 15 حذف شده
  1. 3 0
      CHANGES.txt
  2. 4 3
      src/java/org/apache/hadoop/dfs/FSDirectory.java
  3. 14 12
      src/java/org/apache/hadoop/dfs/INode.java

+ 3 - 0
CHANGES.txt

@@ -149,6 +149,9 @@ Trunk (unreleased changes)
     HADOOP-3398. Minor improvement to a utility function in that participates
     in backoff calculation. (cdouglas)
 
+    HADOOP-3381. Clear referenced when directories are deleted so that 
+    effect of memory leaks are not multiplied. (rangadi)
+
   OPTIMIZATIONS
 
     HADOOP-3274. The default constructor of BytesWritable creates empty 

+ 4 - 3
src/java/org/apache/hadoop/dfs/FSDirectory.java

@@ -534,16 +534,17 @@ class FSDirectory implements FSConstants {
         // Remove the node from the namespace and GC all
         // the blocks underneath the node.
         //
-        if (!targetNode.removeNode()) {
+        if (targetNode.getParent() == null) {
           NameNode.stateChangeLog.warn("DIR* FSDirectory.unprotectedDelete: "
                                        +"failed to remove "+src+" because it does not have a parent");
           return null;
         } else {
+          targetNode.getParent().setModificationTime(modificationTime);
+          targetNode.removeNode();
           NameNode.stateChangeLog.debug("DIR* FSDirectory.unprotectedDelete: "
                                         +src+" is removed");
-          targetNode.getParent().setModificationTime(modificationTime);
           ArrayList<Block> v = new ArrayList<Block>();
-          int filesRemoved = targetNode.collectSubtreeBlocks(v);
+          int filesRemoved = targetNode.collectSubtreeBlocksAndClear(v);
           incrDeletedFileCount(filesRemoved);
           totalInodes -= filesRemoved;
           for (Block b : v) {

+ 14 - 12
src/java/org/apache/hadoop/dfs/INode.java

@@ -144,7 +144,12 @@ abstract class INode implements Comparable<byte[]> {
    * Check whether it's a directory
    */
   abstract boolean isDirectory();
-  abstract int collectSubtreeBlocks(List<Block> v);
+  /**
+   * Collect all the blocks in all children of this INode.
+   * Count and return the number of files in the sub tree.
+   * Also clears references since this INode is deleted.
+   */
+  abstract int collectSubtreeBlocksAndClear(List<Block> v);
 
   /** Compute {@link ContentSummary}. */
   final ContentSummary computeContentSummary() {
@@ -264,6 +269,7 @@ abstract class INode implements Comparable<byte[]> {
     } else {
       
       parent.removeChild(this);
+      parent = null;
       return true;
     }
   }
@@ -612,18 +618,16 @@ class INodeDirectory extends INode {
     return children;
   }
 
-  /**
-   * Collect all the blocks in all children of this INode.
-   * Count and return the number of files in the sub tree.
-   */
-  int collectSubtreeBlocks(List<Block> v) {
+  int collectSubtreeBlocksAndClear(List<Block> v) {
     int total = 1;
     if (children == null) {
       return total;
     }
     for (INode child : children) {
-      total += child.collectSubtreeBlocks(v);
+      total += child.collectSubtreeBlocksAndClear(v);
     }
+    parent = null;
+    children = null;
     return total;
   }
 }
@@ -736,14 +740,12 @@ class INodeFile extends INode {
     this.blocks[idx] = blk;
   }
 
-  /**
-   * Collect all the blocks in this INode.
-   * Return the number of files in the sub tree.
-   */
-  int collectSubtreeBlocks(List<Block> v) {
+  int collectSubtreeBlocksAndClear(List<Block> v) {
+    parent = null;
     for (Block blk : blocks) {
       v.add(blk);
     }
+    blocks = null;
     return 1;
   }