HDFS-8456. Introduce STORAGE_CONTAINER_SERVICE as a new NodeType. (Contributed by Arpit Agarwal)

Arpit Agarwal 10 years ago
commit e2f494c2c4

+ 9 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-7240.txt

@@ -0,0 +1,9 @@
+  Breakdown of HDFS-7240 sub-tasks:
+
+    HDFS-8210. Ozone: Implement storage container manager. (Jitendra Pandey)
+
+    HDFS-8392. Ozone: DataNode support for multiple datasets. (Arpit Agarwal)
+
+    HDFS-8456. Ozone: Introduce STORAGE_CONTAINER_SERVICE as a new NodeType.
+    (Arpit Agarwal)
+

+ 37 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java

@@ -157,7 +157,8 @@ public class PBHelper {
   public static StorageInfoProto convert(StorageInfo info) {
     return StorageInfoProto.newBuilder().setClusterID(info.getClusterID())
         .setCTime(info.getCTime()).setLayoutVersion(info.getLayoutVersion())
-        .setNamespceID(info.getNamespaceID()).build();
+        .setNamespceID(info.getNamespaceID())
+        .setNodeType(convert(info.getNodeType())).build();
   }
 
   public static StorageInfo convert(StorageInfoProto info, NodeType type) {
@@ -334,9 +335,43 @@ public class PBHelper {
 
   public static NamespaceInfo convert(NamespaceInfoProto info) {
     StorageInfoProto storage = info.getStorageInfo();
+
+    // The default node type must be NAME_NODE for wire compatibility.
     return new NamespaceInfo(storage.getNamespceID(), storage.getClusterID(),
         info.getBlockPoolID(), storage.getCTime(), info.getBuildVersion(),
-        info.getSoftwareVersion(), info.getCapabilities());
+        info.getSoftwareVersion(),
+        storage.hasNodeType() ? convert(storage.getNodeType()) : NodeType.NAME_NODE,
+        info.getCapabilities());
+  }
+
+  public static NodeType convert(StorageInfoProto.NodeTypeProto nodeType) {
+    switch(nodeType) {
+    case NAME_NODE:
+      return NodeType.NAME_NODE;
+    case DATA_NODE:
+      return NodeType.DATA_NODE;
+    case JOURNAL_NODE:
+      return NodeType.JOURNAL_NODE;
+    case STORAGE_CONTAINER_SERVICE:
+      return NodeType.STORAGE_CONTAINER_SERVICE;
+    default:
+      throw new IllegalArgumentException("Unrecognized NodeType " + nodeType);
+    }
+  }
+
+  public static StorageInfoProto.NodeTypeProto convert(NodeType nodeType) {
+    switch(nodeType) {
+    case NAME_NODE:
+      return StorageInfoProto.NodeTypeProto.NAME_NODE;
+    case DATA_NODE:
+      return StorageInfoProto.NodeTypeProto.DATA_NODE;
+    case JOURNAL_NODE:
+      return StorageInfoProto.NodeTypeProto.JOURNAL_NODE;
+    case STORAGE_CONTAINER_SERVICE:
+      return StorageInfoProto.NodeTypeProto.STORAGE_CONTAINER_SERVICE;
+    default:
+      throw new IllegalArgumentException("Unrecognized NodeType " + nodeType);
+    }
   }
 
   public static NamenodeCommand convert(NamenodeCommandProto cmd) {

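For reference, a minimal sketch (not part of this patch) of how the two new PBHelper overloads are meant to round-trip a node type. It assumes the generated protobuf outer class HdfsServerProtos lives under org.apache.hadoop.hdfs.protocol.proto, which is where HdfsServer.proto is compiled in hadoop-hdfs; adjust the import if your tree differs.

import org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.StorageInfoProto;
import org.apache.hadoop.hdfs.protocolPB.PBHelper;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType;

public class NodeTypeRoundTripSketch {
  public static void main(String[] args) {
    // Java enum -> protobuf enum, as done when a StorageInfo is serialized.
    StorageInfoProto.NodeTypeProto wire =
        PBHelper.convert(NodeType.STORAGE_CONTAINER_SERVICE);

    // Protobuf enum -> Java enum, as done on the receiving side.
    NodeType back = PBHelper.convert(wire);

    // The mapping is one-to-one for all four declared node types.
    System.out.println(wire + " -> " + back);
  }
}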
+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java

@@ -92,7 +92,8 @@ public interface HdfsServerConstants {
   enum NodeType {
     NAME_NODE,
     DATA_NODE,
-    JOURNAL_NODE
+    JOURNAL_NODE,
+    STORAGE_CONTAINER_SERVICE
   }
 
   /** Startup options for rolling upgrade. */

+ 4 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/StorageInfo.java

@@ -256,4 +256,8 @@ public class StorageInfo {
     }
     return props;
   }
+
+  public NodeType getNodeType() {
+    return storageType;
+  }
 }

+ 17 - 5
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java

@@ -76,7 +76,12 @@ public class NamespaceInfo extends StorageInfo {
 
  // defaults to enabled capabilities since this ctor is for server
   public NamespaceInfo() {
-    super(NodeType.NAME_NODE);
+    this(NodeType.NAME_NODE);
+  }
+
+  // defaults to enabled capabilities since this ctor is for server
+  public NamespaceInfo(NodeType nodeType) {
+    super(nodeType);
     buildVersion = null;
     capabilities = CAPABILITIES_SUPPORTED;
   }
@@ -84,16 +89,17 @@ public class NamespaceInfo extends StorageInfo {
  // defaults to enabled capabilities since this ctor is for server
   public NamespaceInfo(int nsID, String clusterID, String bpID,
       long cT, String buildVersion, String softwareVersion) {
-    this(nsID, clusterID, bpID, cT, buildVersion, softwareVersion,
-        CAPABILITIES_SUPPORTED);
+    this(nsID, clusterID, bpID, cT, buildVersion,
+         softwareVersion, NodeType.NAME_NODE,
+         CAPABILITIES_SUPPORTED);
   }
 
   // for use by server and/or client
   public NamespaceInfo(int nsID, String clusterID, String bpID,
       long cT, String buildVersion, String softwareVersion,
-      long capabilities) {
+      NodeType nodeType, long capabilities) {
     super(HdfsServerConstants.NAMENODE_LAYOUT_VERSION, nsID, clusterID, cT,
-        NodeType.NAME_NODE);
+        nodeType);
     blockPoolID = bpID;
     this.buildVersion = buildVersion;
     this.softwareVersion = softwareVersion;
@@ -105,6 +111,12 @@ public class NamespaceInfo extends StorageInfo {
     this(nsID, clusterID, bpID, cT, Storage.getBuildVersion(),
         VersionInfo.getVersion());
   }
+
+  public NamespaceInfo(int nsID, String clusterID, String bpID,
+                       long cT, NodeType nodeType) {
+    this(nsID, clusterID, bpID, cT, Storage.getBuildVersion(),
+         VersionInfo.getVersion(), nodeType, CAPABILITIES_SUPPORTED);
+  }
   
   public long getCapabilities() {
     return capabilities;

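A hedged sketch of how a storage container service could construct its NamespaceInfo through the widened constructors; the IDs below are made-up placeholders, not values from the patch. Callers that keep using the existing constructors without a NodeType still get NodeType.NAME_NODE by default.

import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;

public class ScmNamespaceInfoSketch {
  public static void main(String[] args) {
    // Placeholder namespace, cluster and block pool IDs for illustration only.
    NamespaceInfo info = new NamespaceInfo(
        12345,                               // nsID
        "CID-example",                       // clusterID
        "BP-example",                        // bpID
        0L,                                  // cTime
        NodeType.STORAGE_CONTAINER_SERVICE);

    // The node type is exposed through the new StorageInfo#getNodeType().
    System.out.println(info.getNodeType()); // STORAGE_CONTAINER_SERVICE
  }
}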
+ 8 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto

@@ -184,6 +184,14 @@ message StorageInfoProto {
   required uint32 namespceID = 2;    // File system namespace ID
   required string clusterID = 3;     // ID of the cluster
   required uint64 cTime = 4;         // File system creation time
+
+  enum NodeTypeProto {
+    NAME_NODE = 1;
+    DATA_NODE = 2;
+    JOURNAL_NODE = 3;
+    STORAGE_CONTAINER_SERVICE = 4;
+  }
+  optional NodeTypeProto nodeType = 5;
 }
 
 /**
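To close, a sketch of the compatibility rule the optional field enables: a StorageInfoProto built by an older sender simply omits nodeType, and the receiver falls back to NAME_NODE, mirroring PBHelper#convert(NamespaceInfoProto) above. Field values are placeholders, and the same imports and generated-class location are assumed as in the first sketch.

// Builder calls match the field names in HdfsServer.proto, including the
// historical "namespceID" spelling.
StorageInfoProto fromOldSender = StorageInfoProto.newBuilder()
    .setLayoutVersion(0)          // placeholder value
    .setNamespceID(12345)         // placeholder value
    .setClusterID("CID-example")
    .setCTime(0L)
    .build();                     // nodeType deliberately left unset

NodeType type = fromOldSender.hasNodeType()
    ? PBHelper.convert(fromOldSender.getNodeType())
    : NodeType.NAME_NODE;         // wire-compatibility default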