Browse code

HADOOP-1653. Cleanup of FsDirectory. Made INode a static class.
Contributed by Christophe Taton.



git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@561200 13f79535-47bb-0310-9956-ffa450edef68

Dhruba Borthakur, 18 years ago
parent
commit
35a17fa340

+ 3 - 0
CHANGES.txt

@@ -5,6 +5,9 @@ Trunk (unreleased changes)
   1. HADOOP-1636.  Allow configuration of the number of jobs kept in memory
   1. HADOOP-1636.  Allow configuration of the number of jobs kept in memory
      by the JobTracker. (Michael Bieniosek via omalley)
      by the JobTracker. (Michael Bieniosek via omalley)
 
 
+  2. HADOOP-1653. FSDirectory code-cleanups. FSDirectory.INode becomes a static
+     class.  (Christophe Taton via dhruba)
+
 Branch 0.14 (unreleased changes)
 Branch 0.14 (unreleased changes)
 
 
   1. HADOOP-1197.  In Configuration, deprecate getObject() and add
   1. HADOOP-1197.  In Configuration, deprecate getObject() and add

+ 1 - 1
src/java/org/apache/hadoop/dfs/DFSFileInfo.java

@@ -56,7 +56,7 @@ class DFSFileInfo implements Writable, FileStatus {
    * Create DFSFileInfo by file INode 
    * Create DFSFileInfo by file INode 
    */
    */
   public DFSFileInfo(FSDirectory.INode node) {
   public DFSFileInfo(FSDirectory.INode node) {
-    this.path = new Path(node.computeName());
+    this.path = new Path(node.getAbsoluteName());
     this.isDir = node.isDir();
     this.isDir = node.isDir();
     this.len = isDir ? node.computeContentsLength() : node.computeFileLength();
     this.len = isDir ? node.computeContentsLength() : node.computeFileLength();
     this.blockReplication = node.getReplication();
     this.blockReplication = node.getReplication();

+ 28 - 31
src/java/org/apache/hadoop/dfs/FSDirectory.java

@@ -20,7 +20,6 @@ package org.apache.hadoop.dfs;
 import java.io.*;
 import java.io.*;
 import java.util.*;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.dfs.FSConstants.StartupOption;
 
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.metrics.MetricsRecord;
 import org.apache.hadoop.metrics.MetricsRecord;
@@ -44,7 +43,8 @@ class FSDirectory implements FSConstants {
    * 
    * 
    * TODO: Factor out INode to a standalone class.
    * TODO: Factor out INode to a standalone class.
    ******************************************************/
    ******************************************************/
-  class INode {
+  static class INode {
+
     private String name;
     private String name;
     private INode parent;
     private INode parent;
     private TreeMap<String, INode> children = null;
     private TreeMap<String, INode> children = null;
@@ -52,16 +52,6 @@ class FSDirectory implements FSConstants {
     private short blockReplication;
     private short blockReplication;
     private long modificationTime;
     private long modificationTime;
 
 
-    /**
-     */
-    INode(String name, Block blocks[], short replication) {
-      this.name = name;
-      this.parent = null;
-      this.blocks = blocks;
-      this.blockReplication = replication;
-      this.modificationTime = 0;
-    }
-
     /**
     /**
      */
      */
     INode(String name) {
     INode(String name) {
@@ -113,12 +103,29 @@ class FSDirectory implements FSConstants {
     String getLocalName() {
     String getLocalName() {
       return name;
       return name;
     }
     }
-    
+
+    /**
+     * Get the full absolute path name of this file (recursively computed).
+     * 
+     * @return the string representation of the absolute path of this file
+     */
     String getAbsoluteName() {
     String getAbsoluteName() {
-      // recursively constructs the absolute path.
-      // Any escaping of name required?
-      return ((parent != null) ? 
-              (parent.getAbsoluteName() + Path.SEPARATOR): "") + name;
+      return internalGetAbsolutePathName().toString();
+    }
+
+    /**
+     * Recursive computation of the absolute path name of this INode using a
+     * StringBuffer. This relies on the root INode name being "".
+     * 
+     * @return the StringBuffer containing the absolute path name.
+     */
+    private StringBuffer internalGetAbsolutePathName() {
+      if (parent == null) {
+        return new StringBuffer(name);
+      } else {
+        return parent.internalGetAbsolutePathName().append(
+            Path.SEPARATOR_CHAR).append(name);
+      }
     }
     }
 
 
     /**
     /**
@@ -270,16 +277,16 @@ class FSDirectory implements FSConstants {
      * This operation is performed after a node is removed from the tree,
      * This operation is performed after a node is removed from the tree,
      * and we want to GC all the blocks at this node and below.
      * and we want to GC all the blocks at this node and below.
      */
      */
-    void collectSubtreeBlocks(Vector<Block> v) {
+    void collectSubtreeBlocks(FSDirectory fsDir, Vector<Block> v) {
       if (blocks != null) {
       if (blocks != null) {
         for (int i = 0; i < blocks.length; i++) {
         for (int i = 0; i < blocks.length; i++) {
           v.add(blocks[i]);
           v.add(blocks[i]);
         }
         }
       }
       }
-      incrDeletedFileCount();
+      fsDir.incrDeletedFileCount();
       for (Iterator<INode> it = getChildIterator(); it != null &&
       for (Iterator<INode> it = getChildIterator(); it != null &&
              it.hasNext();) {
              it.hasNext();) {
-        it.next().collectSubtreeBlocks(v);
+        it.next().collectSubtreeBlocks(fsDir, v);
       }
       }
     }
     }
 
 
@@ -294,16 +301,6 @@ class FSDirectory implements FSConstants {
       return total + 1;
       return total + 1;
     }
     }
 
 
-    /**
-     */
-    String computeName() {
-      if (parent != null) {
-        return parent.computeName() + "/" + name;
-      } else {
-        return name;
-      }
-    }
-
     /**
     /**
      */
      */
     long computeFileLength() {
     long computeFileLength() {
@@ -642,7 +639,7 @@ class FSDirectory implements FSConstants {
                                         +src+" is removed");
                                         +src+" is removed");
           targetNode.getParent().setModificationTime(modificationTime);
           targetNode.getParent().setModificationTime(modificationTime);
           Vector<Block> v = new Vector<Block>();
           Vector<Block> v = new Vector<Block>();
-          targetNode.collectSubtreeBlocks(v);
+          targetNode.collectSubtreeBlocks(this, v);
           for (Block b : v) {
           for (Block b : v) {
             namesystem.blocksMap.removeINode(b);
             namesystem.blocksMap.removeINode(b);
           }
           }

+ 2 - 2
src/java/org/apache/hadoop/dfs/FSEditLog.java

@@ -478,7 +478,7 @@ class FSEditLog {
    */
    */
   void logCreateFile(FSDirectory.INode newNode) {
   void logCreateFile(FSDirectory.INode newNode) {
     UTF8 nameReplicationPair[] = new UTF8[] { 
     UTF8 nameReplicationPair[] = new UTF8[] { 
-      new UTF8(newNode.computeName()), 
+      new UTF8(newNode.getAbsoluteName()), 
       FSEditLog.toLogReplication(newNode.getReplication()),
       FSEditLog.toLogReplication(newNode.getReplication()),
       FSEditLog.toLogTimeStamp(newNode.getModificationTime())};
       FSEditLog.toLogTimeStamp(newNode.getModificationTime())};
     logEdit(OP_ADD,
     logEdit(OP_ADD,
@@ -491,7 +491,7 @@ class FSEditLog {
    */
    */
   void logMkDir(FSDirectory.INode newNode) {
   void logMkDir(FSDirectory.INode newNode) {
     UTF8 info[] = new UTF8[] {
     UTF8 info[] = new UTF8[] {
-      new UTF8(newNode.computeName()),
+      new UTF8(newNode.getAbsoluteName()),
       FSEditLog.toLogTimeStamp(newNode.getModificationTime())
       FSEditLog.toLogTimeStamp(newNode.getModificationTime())
     };
     };
     logEdit(OP_MKDIR, new ArrayWritable(UTF8.class, info), null);
     logEdit(OP_MKDIR, new ArrayWritable(UTF8.class, info), null);