
HDFS-2729. Update BlockManager's comments regarding the invalid block set (harsh)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1225591 13f79535-47bb-0310-9956-ffa450edef68
Harsh J 13 years ago
commit 6c349f9420

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -119,6 +119,8 @@ Trunk (unreleased changes)
 
     HDFS-2726. Fix a logging issue under DFSClient's createBlockOutputStream method (harsh)
 
+    HDFS-2729. Update BlockManager's comments regarding the invalid block set (harsh)
+
   OPTIMIZATIONS
     HDFS-2477. Optimize computing the diff between a block report and the
     namenode state. (Tomasz Nykiel via hairong)

+ 4 - 4
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java

@@ -1492,7 +1492,7 @@ public class BlockManager {
     // Ignore replicas already scheduled to be removed from the DN
     if(invalidateBlocks.contains(dn.getStorageID(), block)) {
       assert storedBlock.findDatanode(dn) < 0 : "Block " + block
-        + " in recentInvalidatesSet should not appear in DN " + dn;
+        + " in invalidated blocks set should not appear in DN " + dn;
       return storedBlock;
     }
 
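For orientation, the invalidateBlocks set queried in the hunk above is keyed by storage ID and answers contains(storageID, block). A minimal sketch of that shape, with class and field names that are assumptions rather than the real InvalidateBlocks class:

// Hedged sketch only: a per-datanode set of replicas queued for deletion,
// keyed by storage ID, mirroring the contains(storageID, block) check above.
// Class and field names are assumptions, not the real InvalidateBlocks class.
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class PendingInvalidationsSketch {
  // storage ID -> block IDs scheduled for deletion on that datanode
  private final Map<String, Set<Long>> node2blocks = new HashMap<>();

  synchronized void add(String storageId, long blockId) {
    node2blocks.computeIfAbsent(storageId, k -> new HashSet<>()).add(blockId);
  }

  synchronized boolean contains(String storageId, long blockId) {
    Set<Long> blocks = node2blocks.get(storageId);
    return blocks != null && blocks.contains(blockId);
  }
}

A contains(...) check like this lets the caller treat a replica already queued for deletion as effectively gone from the datanode, which is what the assertion in the hunk above relies on.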
@@ -1720,7 +1720,7 @@ public class BlockManager {
    * Invalidate corrupt replicas.
    * <p>
    * This will remove the replicas from the block's location list,
-   * add them to {@link #recentInvalidateSets} so that they could be further
+   * add them to {@link #invalidateBlocks} so that they could be further
    * deleted from the respective data-nodes,
    * and remove the block from corruptReplicasMap.
    * <p>
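The javadoc in this hunk describes a three-step flow: drop the corrupt replica from the block's location list, queue it in invalidateBlocks so the datanode deletes it later, and clear the block from corruptReplicasMap. A self-contained sketch of that flow, using hypothetical stand-in fields rather than the real BlockManager internals:

// Hedged sketch only: the three steps described in the javadoc above, over
// assumed stand-in data structures (none of these are the real BlockManager fields).
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class CorruptReplicaInvalidationSketch {
  // block ID -> storage IDs currently listed as holding a replica
  private final Map<Long, Set<String>> blockLocations = new HashMap<>();
  // storage ID -> block IDs queued for deletion on that datanode (stand-in for invalidateBlocks)
  private final Map<String, Set<Long>> invalidateBlocks = new HashMap<>();
  // block ID -> storage IDs recorded as holding a corrupt replica (stand-in for corruptReplicasMap)
  private final Map<Long, Set<String>> corruptReplicasMap = new HashMap<>();

  synchronized void invalidateCorruptReplicas(long blockId) {
    Set<String> corruptNodes = corruptReplicasMap.get(blockId);
    if (corruptNodes == null) {
      return; // nothing recorded as corrupt for this block
    }
    for (String storageId : corruptNodes) {
      // 1. remove the replica from the block's location list
      Set<String> locations = blockLocations.get(blockId);
      if (locations != null) {
        locations.remove(storageId);
      }
      // 2. add it to the invalidation set so the datanode deletes it later
      invalidateBlocks.computeIfAbsent(storageId, k -> new HashSet<>()).add(blockId);
    }
    // 3. the block is no longer tracked in the corrupt-replicas map
    corruptReplicasMap.remove(blockId);
  }
}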
@@ -1949,7 +1949,7 @@ public class BlockManager {
       //
       addToInvalidates(b, cur);
       NameNode.stateChangeLog.info("BLOCK* chooseExcessReplicates: "
-                +"("+cur.getName()+", "+b+") is added to recentInvalidateSets");
+                +"("+cur.getName()+", "+b+") is added to invalidated blocks set.");
     }
   }
 
@@ -2365,7 +2365,7 @@ public class BlockManager {
 
   /**
    * Get blocks to invalidate for <i>nodeId</i>
-   * in {@link #recentInvalidateSets}.
+   * in {@link #invalidateBlocks}.
    *
    * @return number of blocks scheduled for removal during this iteration.
    */
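This last hunk documents the per-node drain path: for a given nodeId, blocks are pulled from the invalidation set and the method reports how many were scheduled for removal in the current iteration. A hedged sketch of that pattern, with names and the batching policy assumed for illustration:

// Hedged sketch only: pulling a bounded batch of queued invalidations for one
// datanode and reporting how many were scheduled in this iteration. Names and
// the batching policy are assumptions, not the actual BlockManager code.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

class InvalidationWorkSketch {
  // storage ID -> block IDs queued for deletion on that datanode
  private final Map<String, Set<Long>> node2blocks = new HashMap<>();

  synchronized void add(String storageId, long blockId) {
    node2blocks.computeIfAbsent(storageId, k -> new HashSet<>()).add(blockId);
  }

  // Drain at most 'limit' queued blocks for one node; return the number scheduled.
  synchronized int invalidateWorkForOneNode(String storageId, int limit) {
    Set<Long> queued = node2blocks.get(storageId);
    if (queued == null || queued.isEmpty()) {
      return 0;
    }
    List<Long> batch = new ArrayList<>();
    Iterator<Long> it = queued.iterator();
    while (it.hasNext() && batch.size() < limit) {
      batch.add(it.next());
      it.remove(); // scheduled in this iteration, so drop it from the pending set
    }
    // In HDFS the scheduled blocks would be handed to the datanode (e.g. via its
    // heartbeat reply); here we only report the count, as the @return tag describes.
    return batch.size();
  }
}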