Browse Source

HDFS-17063. Support to configure different capacity reserved for each disk of DataNode. (#5793). Contributed by QI Jiale.

Reviewed-by: Tao Li <tomscut@apache.org>
Signed-off-by: He Xiaoqiao <hexiaoqiao@apache.org>
QI Jiale 1 year ago
parent
commit
28068aa320

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java

@@ -171,7 +171,8 @@ public class FsVolumeImpl implements FsVolumeSpi {
     this.usage = usage;
     this.usage = usage;
     if (this.usage != null) {
     if (this.usage != null) {
       reserved = new ReservedSpaceCalculator.Builder(conf)
       reserved = new ReservedSpaceCalculator.Builder(conf)
-          .setUsage(this.usage).setStorageType(storageType).build();
+          .setUsage(this.usage).setStorageType(storageType)
+          .setDir(currentDir != null ? currentDir.getParent() : "NULL").build();
       boolean fixedSizeVolume = conf.getBoolean(
       boolean fixedSizeVolume = conf.getBoolean(
           DFSConfigKeys.DFS_DATANODE_FIXED_VOLUME_SIZE_KEY,
           DFSConfigKeys.DFS_DATANODE_FIXED_VOLUME_SIZE_KEY,
           DFSConfigKeys.DFS_DATANODE_FIXED_VOLUME_SIZE_DEFAULT);
           DFSConfigKeys.DFS_DATANODE_FIXED_VOLUME_SIZE_DEFAULT);

+ 30 - 13
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ReservedSpaceCalculator.java

@@ -46,6 +46,8 @@ public abstract class ReservedSpaceCalculator {
     private DF usage;
     private DF usage;
     private StorageType storageType;
     private StorageType storageType;
 
 
+    private String dir;
+
     public Builder(Configuration conf) {
     public Builder(Configuration conf) {
       this.conf = conf;
       this.conf = conf;
     }
     }
@@ -61,6 +63,11 @@ public abstract class ReservedSpaceCalculator {
       return this;
       return this;
     }
     }
 
 
+    public Builder setDir(String newDir) {
+      this.dir = newDir;
+      return this;
+    }
+
     ReservedSpaceCalculator build() {
     ReservedSpaceCalculator build() {
       try {
       try {
         Class<? extends ReservedSpaceCalculator> clazz = conf.getClass(
         Class<? extends ReservedSpaceCalculator> clazz = conf.getClass(
@@ -69,10 +76,10 @@ public abstract class ReservedSpaceCalculator {
             ReservedSpaceCalculator.class);
             ReservedSpaceCalculator.class);
 
 
         Constructor constructor = clazz.getConstructor(
         Constructor constructor = clazz.getConstructor(
-            Configuration.class, DF.class, StorageType.class);
+            Configuration.class, DF.class, StorageType.class, String.class);
 
 
         return (ReservedSpaceCalculator) constructor.newInstance(
         return (ReservedSpaceCalculator) constructor.newInstance(
-            conf, usage, storageType);
+            conf, usage, storageType, dir);
       } catch (Exception e) {
       } catch (Exception e) {
         throw new IllegalStateException(
         throw new IllegalStateException(
             "Error instantiating ReservedSpaceCalculator", e);
             "Error instantiating ReservedSpaceCalculator", e);
@@ -84,20 +91,30 @@ public abstract class ReservedSpaceCalculator {
   private final Configuration conf;
   private final Configuration conf;
   private final StorageType storageType;
   private final StorageType storageType;
 
 
+  private final String dir;
+
   ReservedSpaceCalculator(Configuration conf, DF usage,
   ReservedSpaceCalculator(Configuration conf, DF usage,
-      StorageType storageType) {
+      StorageType storageType, String dir) {
     this.usage = usage;
     this.usage = usage;
     this.conf = conf;
     this.conf = conf;
     this.storageType = storageType;
     this.storageType = storageType;
+    this.dir = dir;
   }
   }
 
 
   DF getUsage() {
   DF getUsage() {
     return usage;
     return usage;
   }
   }
 
 
+  String getDir() {
+    return dir;
+  }
+
   long getReservedFromConf(String key, long defaultValue) {
   long getReservedFromConf(String key, long defaultValue) {
-    return conf.getLong(key + "." + StringUtils.toLowerCase(
-        storageType.toString()), conf.getLongBytes(key, defaultValue));
+    return conf.getLong(
+        key + "." + getDir() + "." + StringUtils.toLowerCase(storageType.toString()),
+        conf.getLong(key + "." + getDir(),
+            conf.getLong(key + "." + StringUtils.toLowerCase(storageType.toString()),
+                conf.getLongBytes(key, defaultValue))));
   }
   }
 
 
   /**
   /**
@@ -117,8 +134,8 @@ public abstract class ReservedSpaceCalculator {
     private final long reservedBytes;
     private final long reservedBytes;
 
 
     public ReservedSpaceCalculatorAbsolute(Configuration conf, DF usage,
     public ReservedSpaceCalculatorAbsolute(Configuration conf, DF usage,
-        StorageType storageType) {
-      super(conf, usage, storageType);
+        StorageType storageType, String dir) {
+      super(conf, usage, storageType, dir);
       this.reservedBytes = getReservedFromConf(DFS_DATANODE_DU_RESERVED_KEY,
       this.reservedBytes = getReservedFromConf(DFS_DATANODE_DU_RESERVED_KEY,
           DFS_DATANODE_DU_RESERVED_DEFAULT);
           DFS_DATANODE_DU_RESERVED_DEFAULT);
     }
     }
@@ -138,8 +155,8 @@ public abstract class ReservedSpaceCalculator {
     private final long reservedPct;
     private final long reservedPct;
 
 
     public ReservedSpaceCalculatorPercentage(Configuration conf, DF usage,
     public ReservedSpaceCalculatorPercentage(Configuration conf, DF usage,
-        StorageType storageType) {
-      super(conf, usage, storageType);
+        StorageType storageType, String dir) {
+      super(conf, usage, storageType, dir);
       this.reservedPct = getReservedFromConf(
       this.reservedPct = getReservedFromConf(
           DFS_DATANODE_DU_RESERVED_PERCENTAGE_KEY,
           DFS_DATANODE_DU_RESERVED_PERCENTAGE_KEY,
           DFS_DATANODE_DU_RESERVED_PERCENTAGE_DEFAULT);
           DFS_DATANODE_DU_RESERVED_PERCENTAGE_DEFAULT);
@@ -162,8 +179,8 @@ public abstract class ReservedSpaceCalculator {
     private final long reservedPct;
     private final long reservedPct;
 
 
     public ReservedSpaceCalculatorConservative(Configuration conf, DF usage,
     public ReservedSpaceCalculatorConservative(Configuration conf, DF usage,
-        StorageType storageType) {
-      super(conf, usage, storageType);
+        StorageType storageType, String dir) {
+      super(conf, usage, storageType, dir);
       this.reservedBytes = getReservedFromConf(DFS_DATANODE_DU_RESERVED_KEY,
       this.reservedBytes = getReservedFromConf(DFS_DATANODE_DU_RESERVED_KEY,
           DFS_DATANODE_DU_RESERVED_DEFAULT);
           DFS_DATANODE_DU_RESERVED_DEFAULT);
       this.reservedPct = getReservedFromConf(
       this.reservedPct = getReservedFromConf(
@@ -197,8 +214,8 @@ public abstract class ReservedSpaceCalculator {
     private final long reservedPct;
     private final long reservedPct;
 
 
     public ReservedSpaceCalculatorAggressive(Configuration conf, DF usage,
     public ReservedSpaceCalculatorAggressive(Configuration conf, DF usage,
-        StorageType storageType) {
-      super(conf, usage, storageType);
+        StorageType storageType, String dir) {
+      super(conf, usage, storageType, dir);
       this.reservedBytes = getReservedFromConf(DFS_DATANODE_DU_RESERVED_KEY,
       this.reservedBytes = getReservedFromConf(DFS_DATANODE_DU_RESERVED_KEY,
           DFS_DATANODE_DU_RESERVED_DEFAULT);
           DFS_DATANODE_DU_RESERVED_DEFAULT);
       this.reservedPct = getReservedFromConf(
       this.reservedPct = getReservedFromConf(

+ 17 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml

@@ -397,12 +397,19 @@
   <name>dfs.datanode.du.reserved</name>
   <name>dfs.datanode.du.reserved</name>
   <value>0</value>
   <value>0</value>
   <description>Reserved space in bytes per volume. Always leave this much space free for non dfs use.
   <description>Reserved space in bytes per volume. Always leave this much space free for non dfs use.
+      Specific directory based reservation is supported. The property can be followed with directory
+      name which is set at 'dfs.datanode.data.dir'. For example, reserved space for /data/hdfs1/data
+      can be configured using property 'dfs.datanode.du.reserved./data/hdfs1/data'. If specific directory
+      reservation is not configured then dfs.datanode.du.reserved will be used.
       Specific storage type based reservation is also supported. The property can be followed with
       Specific storage type based reservation is also supported. The property can be followed with
       corresponding storage types ([ssd]/[disk]/[archive]/[ram_disk]/[nvdimm]) for cluster with heterogeneous storage.
       corresponding storage types ([ssd]/[disk]/[archive]/[ram_disk]/[nvdimm]) for cluster with heterogeneous storage.
       For example, reserved space for RAM_DISK storage can be configured using property
       For example, reserved space for RAM_DISK storage can be configured using property
       'dfs.datanode.du.reserved.ram_disk'. If specific storage type reservation is not configured
       'dfs.datanode.du.reserved.ram_disk'. If specific storage type reservation is not configured
       then dfs.datanode.du.reserved will be used. Support multiple size unit suffix(case insensitive),
       then dfs.datanode.du.reserved will be used. Support multiple size unit suffix(case insensitive),
-      as described in dfs.blocksize.
+      as described in dfs.blocksize. Using directory name and storage type based reservation at the
+      same time is also allowed if both are configured.
+      Property priority example: dfs.datanode.du.reserved./data/hdfs1/data.ram_disk >
+      dfs.datanode.du.reserved./data/hdfs1/data > dfs.datanode.du.reserved.ram_disk > dfs.datanode.du.reserved
       Note: In case of using tune2fs to set reserved-blocks-percentage, or other filesystem tools,
       Note: In case of using tune2fs to set reserved-blocks-percentage, or other filesystem tools,
       then you can possibly run into out of disk errors because hadoop will not check those
       then you can possibly run into out of disk errors because hadoop will not check those
       external tool configurations.
       external tool configurations.
@@ -414,12 +421,19 @@
   <value>0</value>
   <value>0</value>
   <description>Reserved space in percentage. Read dfs.datanode.du.reserved.calculator to see
   <description>Reserved space in percentage. Read dfs.datanode.du.reserved.calculator to see
     when this takes effect. The actual number of bytes reserved will be calculated by using the
     when this takes effect. The actual number of bytes reserved will be calculated by using the
-    total capacity of the data directory in question. Specific storage type based reservation
+    total capacity of the data directory in question. Specific directory based reservation is
+    supported. The property can be followed with directory name which is set at 'dfs.datanode.data.dir'.
+    For example, reserved percentage space for /data/hdfs1/data can be configured using property
+    'dfs.datanode.du.reserved.pct./data/hdfs1/data'. If specific directory reservation is not
+    configured then dfs.datanode.du.reserved.pct will be used. Specific storage type based reservation
     is also supported. The property can be followed with corresponding storage types
     is also supported. The property can be followed with corresponding storage types
     ([ssd]/[disk]/[archive]/[ram_disk]/[nvdimm]) for cluster with heterogeneous storage.
     ([ssd]/[disk]/[archive]/[ram_disk]/[nvdimm]) for cluster with heterogeneous storage.
     For example, reserved percentage space for RAM_DISK storage can be configured using property
     For example, reserved percentage space for RAM_DISK storage can be configured using property
     'dfs.datanode.du.reserved.pct.ram_disk'. If specific storage type reservation is not configured
     'dfs.datanode.du.reserved.pct.ram_disk'. If specific storage type reservation is not configured
-    then dfs.datanode.du.reserved.pct will be used.
+    then dfs.datanode.du.reserved.pct will be used. Using directory and storage type based reservation
+    at the same time is also allowed if both are configured.
+    Priority example: dfs.datanode.du.reserved.pct./data/hdfs1/data.ram_disk > dfs.datanode.du.reserved.pct./data/hdfs1/data
+    > dfs.datanode.du.reserved.pct.ram_disk > dfs.datanode.du.reserved.pct
   </description>
   </description>
 </property>
 </property>
 
 

+ 55 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestReservedSpaceCalculator.java

@@ -168,6 +168,55 @@ public class TestReservedSpaceCalculator {
     checkReserved(StorageType.ARCHIVE, 100000, 5000);
     checkReserved(StorageType.ARCHIVE, 100000, 5000);
   }
   }
 
 
+  @Test
+  public void testReservedSpaceAbsolutePerDir() {
+    conf.setClass(DFS_DATANODE_DU_RESERVED_CALCULATOR_KEY, ReservedSpaceCalculatorAbsolute.class,
+        ReservedSpaceCalculator.class);
+
+    String dir1 = "/data/hdfs1/data";
+    String dir2 = "/data/hdfs2/data";
+    String dir3 = "/data/hdfs3/data";
+
+    conf.setLong(DFS_DATANODE_DU_RESERVED_KEY + "." + dir1 + ".ssd", 900);
+    conf.setLong(DFS_DATANODE_DU_RESERVED_KEY + "." + dir1, 1800);
+    conf.setLong(DFS_DATANODE_DU_RESERVED_KEY + "." + dir2, 2700);
+    conf.setLong(DFS_DATANODE_DU_RESERVED_KEY + ".ssd", 3600);
+    conf.setLong(DFS_DATANODE_DU_RESERVED_KEY, 4500);
+
+    checkReserved(StorageType.SSD, 10000, 900, dir1);
+    checkReserved(StorageType.DISK, 10000, 1800, dir1);
+    checkReserved(StorageType.SSD, 10000, 2700, dir2);
+    checkReserved(StorageType.DISK, 10000, 2700, dir2);
+    checkReserved(StorageType.SSD, 10000, 3600, dir3);
+    checkReserved(StorageType.DISK, 10000, 4500, dir3);
+  }
+
+  @Test
+  public void testReservedSpacePercentagePerDir() {
+    conf.setClass(DFS_DATANODE_DU_RESERVED_CALCULATOR_KEY,
+            ReservedSpaceCalculatorPercentage.class,
+            ReservedSpaceCalculator.class);
+
+    String dir1 = "/data/hdfs1/data";
+    String dir2 = "/data/hdfs2/data";
+    String dir3 = "/data/hdfs3/data";
+
+    // Set percentage reserved values for different directories
+    conf.setLong(DFS_DATANODE_DU_RESERVED_PERCENTAGE_KEY + "." + dir1 + ".ssd", 20);
+    conf.setLong(DFS_DATANODE_DU_RESERVED_PERCENTAGE_KEY + "." + dir1, 10);
+    conf.setLong(DFS_DATANODE_DU_RESERVED_PERCENTAGE_KEY + "." + dir2, 25);
+    conf.setLong(DFS_DATANODE_DU_RESERVED_PERCENTAGE_KEY + ".ssd", 30);
+    conf.setLong(DFS_DATANODE_DU_RESERVED_PERCENTAGE_KEY, 40);
+
+    // Verify reserved space calculations for different directories and storage types
+    checkReserved(StorageType.SSD, 10000, 2000, dir1);
+    checkReserved(StorageType.DISK, 10000, 1000, dir1);
+    checkReserved(StorageType.SSD, 10000, 2500, dir2);
+    checkReserved(StorageType.DISK, 10000, 2500, dir2);
+    checkReserved(StorageType.SSD, 10000, 3000, dir3);
+    checkReserved(StorageType.DISK, 10000, 4000, dir3);
+  }
+
   @Test(expected = IllegalStateException.class)
   @Test(expected = IllegalStateException.class)
   public void testInvalidCalculator() {
   public void testInvalidCalculator() {
     conf.set(DFS_DATANODE_DU_RESERVED_CALCULATOR_KEY, "INVALIDTYPE");
     conf.set(DFS_DATANODE_DU_RESERVED_CALCULATOR_KEY, "INVALIDTYPE");
@@ -179,10 +228,15 @@ public class TestReservedSpaceCalculator {
 
 
   private void checkReserved(StorageType storageType,
   private void checkReserved(StorageType storageType,
       long totalCapacity, long reservedExpected) {
       long totalCapacity, long reservedExpected) {
+    checkReserved(storageType, totalCapacity, reservedExpected, "NULL");
+  }
+
+  private void checkReserved(StorageType storageType,
+      long totalCapacity, long reservedExpected, String dir) {
     when(usage.getCapacity()).thenReturn(totalCapacity);
     when(usage.getCapacity()).thenReturn(totalCapacity);
 
 
     reserved = new ReservedSpaceCalculator.Builder(conf).setUsage(usage)
     reserved = new ReservedSpaceCalculator.Builder(conf).setUsage(usage)
-        .setStorageType(storageType).build();
+        .setStorageType(storageType).setDir(dir).build();
     assertEquals(reservedExpected, reserved.getReserved());
     assertEquals(reservedExpected, reserved.getReserved());
   }
   }
 }
 }