HDFS-3164. svn merge -c 1307890 from trunk

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1307891 13f79535-47bb-0310-9956-ffa450edef68
Eli Collins · 13 years ago · commit 5fc21be8a9
20 changed files with 90 additions and 102 deletions
  1. +2 -0   hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
  2. +1 -1   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocal.java
  3. +33 -16 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
  4. +9 -26  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
  5. +3 -2   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsProtoUtil.java
  6. +3 -7   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
  7. +2 -2   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
  8. +2 -15  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java
  9. +4 -3   hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
  10. +4 -4  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
  11. +1 -1  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
  12. +5 -5  hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
  13. +1 -1  hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java
  14. +1 -1  hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java
  15. +1 -1  hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreation.java
  16. +11 -10 hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
  17. +1 -1  hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java
  18. +3 -3  hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java
  19. +2 -2  hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMultipleRegistrations.java
  20. +1 -1  hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestInterDatanodeProtocol.java
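
In short, this change promotes the hostName field from DatanodeInfo into its superclass DatanodeID, so a DatanodeID now carries the IP:port name and the hostname together. A minimal sketch of the resulting API, using illustrative values with the five-argument constructor and accessors introduced in the diffs below:

    // Illustrative values only; argument order is (name, hostName, storageID, infoPort, ipcPort).
    DatanodeID id = new DatanodeID("10.0.0.1:50010", "dn1.example.com", "storage-1234", 50075, 50020);
    String host = id.getHostName();  // "dn1.example.com"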

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -12,6 +12,8 @@ Release 2.0.0 - UNRELEASED
 
     HDFS-3138. Move DatanodeInfo#ipcPort to DatanodeID. (eli)
 
+    HDFS-3164. Move DatanodeInfo#hostName to DatanodeID. (eli)
+
   NEW FEATURES
 
     HDFS-2978. The NameNode should expose name dir statuses via JMX. (atm)

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocal.java

@@ -228,7 +228,7 @@ class BlockReaderLocal implements BlockReader {
   private static BlockLocalPathInfo getBlockPathInfo(ExtendedBlock blk,
       DatanodeInfo node, Configuration conf, int timeout,
       Token<BlockTokenIdentifier> token) throws IOException {
-    LocalDatanodeInfo localDatanodeInfo = getLocalDatanodeInfo(node.ipcPort);
+    LocalDatanodeInfo localDatanodeInfo = getLocalDatanodeInfo(node.getIpcPort());
     BlockLocalPathInfo pathinfo = null;
     ClientDatanodeProtocol proxy = localDatanodeInfo.getDatanodeProxy(node,
         conf, timeout);

+ 33 - 16
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java

@@ -24,7 +24,7 @@ import java.io.IOException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hdfs.DeprecatedUTF8;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
 
 /**
@@ -38,16 +38,17 @@ import org.apache.hadoop.io.WritableComparable;
 public class DatanodeID implements WritableComparable<DatanodeID> {
   public static final DatanodeID[] EMPTY_ARRAY = {};
 
-  public String name;       // hostname:port (data transfer port)
-  public String storageID;  // unique per cluster storageID
-  protected int infoPort;   // info server port
-  public int ipcPort;       // ipc server port
+  protected String name;       // IP:port (data transfer port)
+  protected String hostName;   // hostname
+  protected String storageID;  // unique per cluster storageID
+  protected int infoPort;      // info server port
+  protected int ipcPort;       // IPC server port
 
   /** Equivalent to DatanodeID(""). */
   public DatanodeID() {this("");}
 
   /** Equivalent to DatanodeID(nodeName, "", -1, -1). */
-  public DatanodeID(String nodeName) {this(nodeName, "", -1, -1);}
+  public DatanodeID(String nodeName) {this(nodeName, "", "", -1, -1);}
 
   /**
    * DatanodeID copy constructor
@@ -56,6 +57,7 @@ public class DatanodeID implements WritableComparable<DatanodeID> {
    */
   public DatanodeID(DatanodeID from) {
     this(from.getName(),
+        from.getHostName(),
         from.getStorageID(),
         from.getInfoPort(),
         from.getIpcPort());
@@ -63,14 +65,16 @@ public class DatanodeID implements WritableComparable<DatanodeID> {
   
   /**
    * Create DatanodeID
-   * @param nodeName (hostname:portNumber)
+   * @param node IP:port
+   * @param hostName hostname
    * @param storageID data storage ID
    * @param infoPort info server port
    * @param ipcPort ipc server port
    */
-  public DatanodeID(String nodeName, String storageID,
-      int infoPort, int ipcPort) {
-    this.name = nodeName;
+  public DatanodeID(String name, String hostName,
+      String storageID, int infoPort, int ipcPort) {
+    this.name = name;
+    this.hostName = hostName;
     this.storageID = storageID;
     this.infoPort = infoPort;
     this.ipcPort = ipcPort;
@@ -80,6 +84,10 @@ public class DatanodeID implements WritableComparable<DatanodeID> {
     this.name = name;
   }
 
+  public void setHostName(String hostName) {
+    this.hostName = hostName;
+  }
+
   public void setInfoPort(int infoPort) {
     this.infoPort = infoPort;
   }
@@ -94,7 +102,14 @@ public class DatanodeID implements WritableComparable<DatanodeID> {
   public String getName() {
     return name;
   }
-  
+
+  /**
+   * @return hostname
+   */
+  public String getHostName() {
+    return (hostName == null || hostName.length() == 0) ? getHost() : hostName;
+  }
+
   /**
    * @return data storage ID.
    */
@@ -186,17 +201,19 @@ public class DatanodeID implements WritableComparable<DatanodeID> {
   /////////////////////////////////////////////////
   @Override
   public void write(DataOutput out) throws IOException {
-    DeprecatedUTF8.writeString(out, name);
-    DeprecatedUTF8.writeString(out, storageID);
+    Text.writeString(out, name);
+    Text.writeString(out, hostName);
+    Text.writeString(out, storageID);
     out.writeShort(infoPort);
     out.writeShort(ipcPort);
   }
 
   @Override
   public void readFields(DataInput in) throws IOException {
-    name = DeprecatedUTF8.readString(in);
-    storageID = DeprecatedUTF8.readString(in);
-    // the infoPort read could be negative, if the port is a large number (more
+    name = Text.readString(in);
+    hostName = Text.readString(in);
+    storageID = Text.readString(in);
+    // The port read could be negative, if the port is a large number (more
    // than 15 bits in storage size (but less than 16 bits).
    // So chop off the first two bytes (and hence the signed bits) before
    // setting the field.
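
Note the fallback in the new getHostName(): a DatanodeID deserialized from an older record, or built with the one-argument constructor (which passes "" for hostName), falls back to getHost(). A small sketch, assuming getHost() returns the host portion of the name field:

    DatanodeID id = new DatanodeID("127.0.0.1:50010"); // hostName defaults to ""
    id.getHostName();                 // "127.0.0.1", via the getHost() fallback
    id.setHostName("dn1.example.com");
    id.getHostName();                 // "dn1.example.com"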

+ 9 - 26
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java

@@ -51,9 +51,6 @@ public class DatanodeInfo extends DatanodeID implements Node {
   protected long lastUpdate;
   protected int xceiverCount;
   protected String location = NetworkTopology.DEFAULT_RACK;
-
-  // The FQDN of the IP associated with the Datanode's hostname
-  protected String hostName = null;
   
   // Datanode administrative states
   public enum AdminStates {
@@ -110,30 +107,27 @@ public class DatanodeInfo extends DatanodeID implements Node {
     this.adminState = null;
   }
   
-  public DatanodeInfo(DatanodeID nodeID, String location, String hostName) {
+  public DatanodeInfo(DatanodeID nodeID, String location) {
     this(nodeID);
     this.location = location;
-    this.hostName = hostName;
   }
   
-  public DatanodeInfo(DatanodeID nodeID, String location, String hostName,
+  public DatanodeInfo(DatanodeID nodeID, String location,
       final long capacity, final long dfsUsed, final long remaining,
       final long blockPoolUsed, final long lastUpdate, final int xceiverCount,
       final AdminStates adminState) {
-    this(nodeID.getName(), nodeID.getStorageID(), nodeID.getInfoPort(), nodeID
+    this(nodeID.getName(), nodeID.getHostName(), nodeID.getStorageID(), nodeID.getInfoPort(), nodeID
         .getIpcPort(), capacity, dfsUsed, remaining, blockPoolUsed, lastUpdate,
-        xceiverCount, location, hostName, adminState);
+        xceiverCount, location, adminState);
   }
 
   /** Constructor */
-  public DatanodeInfo(final String name, final String storageID,
-      final int infoPort, final int ipcPort,
+  public DatanodeInfo(final String name, final String hostName,
+      final String storageID, final int infoPort, final int ipcPort,
       final long capacity, final long dfsUsed, final long remaining,
       final long blockPoolUsed, final long lastUpdate, final int xceiverCount,
-      final String networkLocation, final String hostName,
-      final AdminStates adminState) {
-    super(name, storageID, infoPort, ipcPort);
-
+      final String networkLocation, final AdminStates adminState) {
+    super(name, hostName, storageID, infoPort, ipcPort);
     this.capacity = capacity;
     this.dfsUsed = dfsUsed;
     this.remaining = remaining;
@@ -141,7 +135,6 @@ public class DatanodeInfo extends DatanodeID implements Node {
     this.lastUpdate = lastUpdate;
     this.xceiverCount = xceiverCount;
     this.location = networkLocation;
-    this.hostName = hostName;
     this.adminState = adminState;
   }
   
@@ -221,15 +214,7 @@ public class DatanodeInfo extends DatanodeID implements Node {
   public synchronized void setNetworkLocation(String location) {
     this.location = NodeBase.normalize(location);
   }
-  
-  public String getHostName() {
-    return (hostName == null || hostName.length()==0) ? getHost() : hostName;
-  }
-  
-  public void setHostName(String host) {
-    hostName = host;
-  }
-  
+    
   /** A formatted string for reporting the status of the DataNode. */
   public String getDatanodeReport() {
     StringBuilder buffer = new StringBuilder();
@@ -387,7 +372,6 @@ public class DatanodeInfo extends DatanodeID implements Node {
     out.writeLong(lastUpdate);
     out.writeInt(xceiverCount);
     Text.writeString(out, location);
-    Text.writeString(out, hostName == null? "": hostName);
     WritableUtils.writeEnum(out, getAdminState());
   }
 
@@ -401,7 +385,6 @@ public class DatanodeInfo extends DatanodeID implements Node {
     this.lastUpdate = in.readLong();
     this.xceiverCount = in.readInt();
     this.location = Text.readString(in);
-    this.hostName = Text.readString(in);
     setAdminState(WritableUtils.readEnum(in, AdminStates.class));
   }
 

+ 3 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsProtoUtil.java

@@ -85,6 +85,7 @@ public abstract class HdfsProtoUtil {
       DatanodeID dni) {
     return HdfsProtos.DatanodeIDProto.newBuilder()
       .setName(dni.getName())
+      .setHostName(dni.getHostName())
       .setStorageID(dni.getStorageID())
       .setInfoPort(dni.getInfoPort())
       .setIpcPort(dni.getIpcPort())
@@ -94,6 +95,7 @@ public abstract class HdfsProtoUtil {
   private static DatanodeID fromProto(HdfsProtos.DatanodeIDProto idProto) {
     return new DatanodeID(
         idProto.getName(),
+        idProto.getHostName(),
         idProto.getStorageID(),
         idProto.getInfoPort(),
         idProto.getIpcPort());
@@ -111,7 +113,6 @@ public abstract class HdfsProtoUtil {
       .setLastUpdate(dni.getLastUpdate())
       .setXceiverCount(dni.getXceiverCount())
       .setLocation(dni.getNetworkLocation())
-      .setHostName(dni.getHostName())
       .setAdminState(HdfsProtos.DatanodeInfoProto.AdminState.valueOf(
           dni.getAdminState().name()))
       .build();
@@ -119,7 +120,7 @@ public abstract class HdfsProtoUtil {
 
   public static DatanodeInfo fromProto(HdfsProtos.DatanodeInfoProto dniProto) {
     DatanodeInfo dniObj = new DatanodeInfo(fromProto(dniProto.getId()),
-        dniProto.getLocation(), dniProto.getHostName());
+        dniProto.getLocation());
 
     dniObj.setCapacity(dniProto.getCapacity());
     dniObj.setDfsUsed(dniProto.getDfsUsed());

+ 3 - 7
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java

@@ -204,12 +204,13 @@ public class PBHelper {
 
   // DatanodeId
   public static DatanodeID convert(DatanodeIDProto dn) {
-    return new DatanodeID(dn.getName(), dn.getStorageID(), dn.getInfoPort(),
+    return new DatanodeID(dn.getName(), dn.getHostName(), dn.getStorageID(), dn.getInfoPort(),
         dn.getIpcPort());
   }
 
   public static DatanodeIDProto convert(DatanodeID dn) {
-    return DatanodeIDProto.newBuilder().setName(dn.getName())
+    return DatanodeIDProto.newBuilder()
+        .setName(dn.getName()).setHostName(dn.getHostName())
         .setInfoPort(dn.getInfoPort()).setIpcPort(dn.getIpcPort())
         .setStorageID(dn.getStorageID()).build();
   }
@@ -442,7 +443,6 @@ public class PBHelper {
     return new DatanodeInfo(
         PBHelper.convert(di.getId()),
         di.hasLocation() ? di.getLocation() : null , 
-        di.hasHostName() ? di.getHostName() : null,
         di.getCapacity(),  di.getDfsUsed(),  di.getRemaining(),
         di.getBlockPoolUsed()  ,  di.getLastUpdate() , di.getXceiverCount() ,
         PBHelper.convert(di.getAdminState())); 
@@ -451,9 +451,6 @@ public class PBHelper {
   static public DatanodeInfoProto convertDatanodeInfo(DatanodeInfo di) {
     if (di == null) return null;
     DatanodeInfoProto.Builder builder = DatanodeInfoProto.newBuilder();
-    if (di.getHostName() != null) {
-      builder.setHostName(di.getHostName());
-    }
     if (di.getNetworkLocation() != null) {
       builder.setLocation(di.getNetworkLocation());
     }
@@ -503,7 +500,6 @@ public class PBHelper {
     builder.setAdminState(PBHelper.convert(info.getAdminState()));
     builder.setCapacity(info.getCapacity())
         .setDfsUsed(info.getDfsUsed())
-        .setHostName(info.getHostName())
        .setId(PBHelper.convert((DatanodeID)info))
        .setLastUpdate(info.getLastUpdate())
        .setLocation(info.getNetworkLocation())
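
The two convert overloads above are inverses, so a DatanodeID should round-trip through its protobuf form with the new hostName intact; this mirrors the updated testConvertDatanodeID in TestPBHelper below. A quick sketch with illustrative values:

    DatanodeID dn = new DatanodeID("node", "node", "sid", 1, 2);
    DatanodeIDProto dnProto = PBHelper.convert(dn);
    DatanodeID dn2 = PBHelper.convert(dnProto);
    assert dn.getHostName().equals(dn2.getHostName());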

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java

@@ -2620,7 +2620,7 @@ assert storedBlock.findDatanode(dn) < 0 : "Block " + block
     StringBuilder nodeList = new StringBuilder();
     while (nodeIter.hasNext()) {
       DatanodeDescriptor node = nodeIter.next();
-      nodeList.append(node.name);
+      nodeList.append(node.getName());
       nodeList.append(" ");
     }
     LOG.info("Block: " + block + ", Expected Replicas: "
@@ -2630,7 +2630,7 @@ assert storedBlock.findDatanode(dn) < 0 : "Block " + block
         + ", excess replicas: " + num.excessReplicas()
         + ", Is Open File: " + fileINode.isUnderConstruction()
         + ", Datanodes having this block: " + nodeList + ", Current Datanode: "
-        + srcNode.name + ", Is current datanode decommissioning: "
+        + srcNode.getName() + ", Is current datanode decommissioning: "
         + srcNode.isDecommissionInProgress());
   }
   

+ 2 - 15
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java

@@ -179,19 +179,7 @@ public class DatanodeDescriptor extends DatanodeInfo {
    */
   public DatanodeDescriptor(DatanodeID nodeID, 
                             String networkLocation) {
-    this(nodeID, networkLocation, null);
-  }
-  
-  /** DatanodeDescriptor constructor
-   * 
-   * @param nodeID id of the data node
-   * @param networkLocation location of the data node in network
-   * @param hostName it could be different from host specified for DatanodeID
-   */
-  public DatanodeDescriptor(DatanodeID nodeID, 
-                            String networkLocation,
-                            String hostName) {
-    this(nodeID, networkLocation, hostName, 0L, 0L, 0L, 0L, 0, 0);
+    this(nodeID, networkLocation, 0L, 0L, 0L, 0L, 0, 0);
   }
   
   /** DatanodeDescriptor constructor
@@ -227,14 +215,13 @@ public class DatanodeDescriptor extends DatanodeInfo {
    */
   public DatanodeDescriptor(DatanodeID nodeID,
                             String networkLocation,
-                            String hostName,
                             long capacity,
                             long dfsUsed,
                             long remaining,
                             long bpused,
                             int xceiverCount,
                             int failedVolumes) {
-    super(nodeID, networkLocation, hostName);
+    super(nodeID, networkLocation);
     updateHeartbeat(capacity, dfsUsed, remaining, bpused, xceiverCount, 
         failedVolumes);
   }

+ 4 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java

@@ -564,6 +564,7 @@ public class DatanodeManager {
       
     // update the datanode's name with ip:port
     DatanodeID dnReg = new DatanodeID(dnAddress + ":" + nodeReg.getPort(),
+                                      hostName,
                                       nodeReg.getStorageID(),
                                       nodeReg.getInfoPort(),
                                       nodeReg.getIpcPort());
@@ -630,10 +631,10 @@ public class DatanodeManager {
     } 
 
     // this is a new datanode serving a new data storage
-    if (nodeReg.getStorageID().equals("")) {
+    if ("".equals(nodeReg.getStorageID())) {
       // this data storage has never been registered
       // it is either empty or was created by pre-storageID version of DFS
-      nodeReg.storageID = newStorageID();
+      nodeReg.setStorageID(newStorageID());
       if(NameNode.stateChangeLog.isDebugEnabled()) {
         NameNode.stateChangeLog.debug(
             "BLOCK* NameSystem.registerDatanode: "
@@ -642,7 +643,7 @@ public class DatanodeManager {
     }
     // register new datanode
     DatanodeDescriptor nodeDescr 
-      = new DatanodeDescriptor(nodeReg, NetworkTopology.DEFAULT_RACK, hostName);
+      = new DatanodeDescriptor(nodeReg, NetworkTopology.DEFAULT_RACK);
     resolveNetworkLocation(nodeDescr);
     addDatanode(nodeDescr);
     checkDecommissioning(nodeDescr, dnAddress);

+ 4 - 4
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java

@@ -167,9 +167,9 @@ import org.mortbay.util.ajax.JSON;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
+import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.BlockingService;
 
-
 /**********************************************************
  * DataNode is a class (and program) that stores a set of
  * blocks for a DFS deployment.  A single deployment can
@@ -952,7 +952,7 @@ public class DataNode extends Configured
   
   public static void setNewStorageID(DatanodeID dnId) {
     LOG.info("Datanode is " + dnId);
-    dnId.storageID = createNewStorageId(dnId.getPort());
+    dnId.setStorageID(createNewStorageId(dnId.getPort()));
   }
   
   static String createNewStorageId(int port) {
@@ -2209,9 +2209,9 @@ public class DataNode extends Configured
     return true;
   }
   
-  /** Methods used by fault injection tests */
+  @VisibleForTesting
   public DatanodeID getDatanodeId() {
-    return new DatanodeID(getMachineName(), getStorageId(),
+    return new DatanodeID(getMachineName(), hostName, getStorageId(),
         infoServer.getPort(), getIpcPort());
   }
 

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java

@@ -306,6 +306,7 @@ public class JsonUtil {
 
     return new DatanodeInfo(
         (String)m.get("name"),
+        (String)m.get("hostName"),
         (String)m.get("storageID"),
         (int)(long)(Long)m.get("infoPort"),
         (int)(long)(Long)m.get("ipcPort"),
@@ -317,7 +318,6 @@ public class JsonUtil {
         (Long)m.get("lastUpdate"),
         (int)(long)(Long)m.get("xceiverCount"),
         (String)m.get("networkLocation"),
-        (String)m.get("hostName"),
         AdminStates.valueOf((String)m.get("adminState")));
   }
 

+ 5 - 5
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto

@@ -48,10 +48,11 @@ message BlockTokenIdentifierProto {
  * Identifies a Datanode
  */
 message DatanodeIDProto {
-  required string name = 1;      // hostname:portNumber
-  required string storageID = 2; // Unique storage id
-  required uint32 infoPort = 3;  // the port where the infoserver is running
-  required uint32 ipcPort = 4;   // the port where the ipc Server is running
+  required string name = 1;      // IP:port (data transfer port)
+  required string hostName = 2;  // hostname
+  required string storageID = 3; // unique storage id
+  required uint32 infoPort = 4;  // info server port
+  required uint32 ipcPort = 5;   // ipc server port
 }
 
 /**
@@ -73,7 +74,6 @@ message DatanodeInfoProto {
   optional uint64 lastUpdate = 6 [default = 0];
   optional uint32 xceiverCount = 7 [default = 0];
   optional string location = 8;
-  optional string hostName = 9;
   enum AdminState {
     NORMAL = 0;
     DECOMMISSION_INPROGRESS = 1;

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java

@@ -158,7 +158,7 @@ public class BlockReaderTestUtil {
    */
   public DataNode getDataNode(LocatedBlock testBlock) {
     DatanodeInfo[] nodes = testBlock.getLocations();
-    int ipcport = nodes[0].ipcPort;
+    int ipcport = nodes[0].getIpcPort();
     return cluster.getDataNode(ipcport);
   }
 

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java

@@ -627,7 +627,7 @@ public class TestDFSClientRetries extends TestCase {
 
     final InetSocketAddress addr = NetUtils.getConnectAddress(server);
     DatanodeID fakeDnId = new DatanodeID(
-        "localhost:" + addr.getPort(), "fake-storage", 0, addr.getPort());
+        "localhost:" + addr.getPort(), "localhost", "fake-storage", 0, addr.getPort());
     
     ExtendedBlock b = new ExtendedBlock("fake-pool", new Block(12345L));
     LocatedBlock fakeBlock = new LocatedBlock(b, new DatanodeInfo[0]);

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreation.java

@@ -844,7 +844,7 @@ public class TestFileCreation extends junit.framework.TestCase {
       LocatedBlock locatedblock = locations.getLocatedBlocks().get(0);
       int successcount = 0;
       for(DatanodeInfo datanodeinfo: locatedblock.getLocations()) {
-        DataNode datanode = cluster.getDataNode(datanodeinfo.ipcPort);
+        DataNode datanode = cluster.getDataNode(datanodeinfo.getIpcPort());
         ExtendedBlock blk = locatedblock.getBlock();
         Block b = DataNodeTestUtils.getFSDataset(datanode).getStoredBlock(
             blk.getBlockPoolId(), blk.getBlockId());

+ 11 - 10
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java

@@ -130,7 +130,7 @@ public class TestPBHelper {
 
   @Test
   public void testConvertDatanodeID() {
-    DatanodeID dn = new DatanodeID("node", "sid", 1, 2);
+    DatanodeID dn = new DatanodeID("node", "node", "sid", 1, 2);
     DatanodeIDProto dnProto = PBHelper.convert(dn);
     DatanodeID dn2 = PBHelper.convert(dnProto);
     compare(dn, dn2);
@@ -141,6 +141,7 @@ public class TestPBHelper {
     assertEquals(dn.getInfoPort(), dn2.getInfoPort());
     assertEquals(dn.getIpcPort(), dn2.getIpcPort());
     assertEquals(dn.getName(), dn2.getName());
+    assertEquals(dn.getHostName(), dn2.getHostName());
     assertEquals(dn.getPort(), dn2.getPort());
     assertEquals(dn.getStorageID(), dn2.getStorageID());
   }
@@ -279,8 +280,8 @@ public class TestPBHelper {
     return new ExtendedBlock("bpid", blkid, 100, 2);
   }
   
-  public DatanodeInfo getDNInfo() {
-    return new DatanodeInfo(new DatanodeID("node", "sid", 1, 2));
+  private DatanodeInfo getDNInfo() {
+    return new DatanodeInfo(new DatanodeID("node", "node", "sid", 1, 2));
   }
   
   private void compare(DatanodeInfo dn1, DatanodeInfo dn2) {
@@ -400,12 +401,12 @@ public class TestPBHelper {
   @Test
   public void testConvertLocatedBlock() {
     DatanodeInfo [] dnInfos = new DatanodeInfo[3];
-    dnInfos[0] = new DatanodeInfo("host0", "0", 5000, 5001, 20000, 10001, 9999,
-        59, 69, 32, "local", "host0", AdminStates.DECOMMISSION_INPROGRESS);
-    dnInfos[1] = new DatanodeInfo("host1", "1", 5000, 5001, 20000, 10001, 9999,
-        59, 69, 32, "local", "host1", AdminStates.DECOMMISSIONED);
-    dnInfos[2] = new DatanodeInfo("host2", "2", 5000, 5001, 20000, 10001, 9999,
-        59, 69, 32, "local", "host1", AdminStates.NORMAL);
+    dnInfos[0] = new DatanodeInfo("host0", "host0", "0", 5000, 5001, 20000, 10001, 9999,
+        59, 69, 32, "local", AdminStates.DECOMMISSION_INPROGRESS);
+    dnInfos[1] = new DatanodeInfo("host1", "host1", "1", 5000, 5001, 20000, 10001, 9999,
+        59, 69, 32, "local", AdminStates.DECOMMISSIONED);
+    dnInfos[2] = new DatanodeInfo("host2", "host2", "2", 5000, 5001, 20000, 10001, 9999,
+        59, 69, 32, "local", AdminStates.NORMAL);
     LocatedBlock lb = new LocatedBlock(
         new ExtendedBlock("bp12", 12345, 10, 53), dnInfos, 5, false);
     LocatedBlockProto lbProto = PBHelper.convert(lb);
@@ -423,7 +424,7 @@ public class TestPBHelper {
   
   @Test
   public void testConvertDatanodeRegistration() {
-    DatanodeID dnId = new DatanodeID("host", "xyz", 1, 0);
+    DatanodeID dnId = new DatanodeID("host", "host", "xyz", 1, 0);
     BlockKey[] keys = new BlockKey[] { getBlockKey(2), getBlockKey(3) };
     ExportedBlockKeys expKeys = new ExportedBlockKeys(true, 9, 10,
         getBlockKey(1), keys);

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java

@@ -280,7 +280,7 @@ public class TestBlockToken {
 
     final InetSocketAddress addr = NetUtils.getConnectAddress(server);
     DatanodeID fakeDnId = new DatanodeID("localhost:" + addr.getPort(),
-        "fake-storage", 0, addr.getPort());
+        "localhost", "fake-storage", 0, addr.getPort());
 
     ExtendedBlock b = new ExtendedBlock("fake-pool", new Block(12345L));
     LocatedBlock fakeBlock = new LocatedBlock(b, new DatanodeInfo[0]);

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java

@@ -197,9 +197,9 @@ public class TestBlockRecovery {
         locs, RECOVERY_ID);
     ArrayList<BlockRecord> syncList = new ArrayList<BlockRecord>(2);
     BlockRecord record1 = new BlockRecord(
-        new DatanodeID("xx", "yy", 44, 55), dn1, replica1);
+        new DatanodeID("xx", "yy", "zz", 44, 55), dn1, replica1);
     BlockRecord record2 = new BlockRecord(
-        new DatanodeID("aa", "bb", 11, 22), dn2, replica2);
+        new DatanodeID("aa", "bb", "cc", 11, 22), dn2, replica2);
     syncList.add(record1);
     syncList.add(record2);
     
@@ -402,7 +402,7 @@ public class TestBlockRecovery {
   private Collection<RecoveringBlock> initRecoveringBlocks() throws IOException {
     Collection<RecoveringBlock> blocks = new ArrayList<RecoveringBlock>(1);
     DatanodeInfo mockOtherDN = new DatanodeInfo(
-        new DatanodeID("127.0.0.1", "storage-1234", 0, 0));
+        new DatanodeID("127.0.0.1", "localhost", "storage-1234", 0, 0));
     DatanodeInfo[] locs = new DatanodeInfo[] {
         new DatanodeInfo(dn.getDNRegistrationForBP(block.getBlockPoolId())),
         mockOtherDN };

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMultipleRegistrations.java

@@ -161,8 +161,8 @@ public class TestDataNodeMultipleRegistrations {
       assertEquals("number of volumes is wrong", 2, volInfos.size());
 
       for (BPOfferService bpos : dn.getAllBpOs()) {
-        LOG.info("reg: bpid=" + "; name=" + bpos.bpRegistration.name + "; sid="
-            + bpos.bpRegistration.storageID + "; nna=" +
+        LOG.info("reg: bpid=" + "; name=" + bpos.bpRegistration.getName() + "; sid="
+            + bpos.bpRegistration.getStorageID() + "; nna=" +
             getNNSocketAddress(bpos));
       }
 

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestInterDatanodeProtocol.java

@@ -348,7 +348,7 @@ public class TestInterDatanodeProtocol {
 
     final InetSocketAddress addr = NetUtils.getConnectAddress(server);
     DatanodeID fakeDnId = new DatanodeID(
-        "localhost:" + addr.getPort(), "fake-storage", 0, addr.getPort());
+        "localhost:" + addr.getPort(), "localhost", "fake-storage", 0, addr.getPort());
     DatanodeInfo dInfo = new DatanodeInfo(fakeDnId);
     InterDatanodeProtocol proxy = null;
     InterDatanodeProtocol proxy = null;