
HDFS-15214. WebHDFS: Add snapshot counts to Content Summary. Contributed by hemanthboyina.

Takanobu Asanuma · 5 years ago
commit b74e47e31a

+ 19 - 0
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java

@@ -440,6 +440,25 @@ public class JsonUtilClient {
         .directoryCount(directoryCount)
         .erasureCodingPolicy(ecPolicy);
     builder = buildQuotaUsage(builder, m, ContentSummary.Builder.class);
+    if (m.get("snapshotLength") != null) {
+      long snapshotLength = ((Number) m.get("snapshotLength")).longValue();
+      builder.snapshotLength(snapshotLength);
+    }
+    if (m.get("snapshotFileCount") != null) {
+      long snapshotFileCount =
+          ((Number) m.get("snapshotFileCount")).longValue();
+      builder.snapshotFileCount(snapshotFileCount);
+    }
+    if (m.get("snapshotDirectoryCount") != null) {
+      long snapshotDirectoryCount =
+          ((Number) m.get("snapshotDirectoryCount")).longValue();
+      builder.snapshotDirectoryCount(snapshotDirectoryCount);
+    }
+    if (m.get("snapshotSpaceConsumed") != null) {
+      long snapshotSpaceConsumed =
+          ((Number) m.get("snapshotSpaceConsumed")).longValue();
+      builder.snapshotSpaceConsumed(snapshotSpaceConsumed);
+    }
     return builder.build();
   }
 

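The null guards above make the new fields optional on the client side: a response from an older NameNode that omits the snapshot keys still parses, and the builder falls back to its defaults. A minimal standalone sketch of the same pattern (illustrative only, not part of the patch; the map literal stands in for a parsed WebHDFS response):

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.fs.ContentSummary;

public class SnapshotFieldParsingSketch {
  public static void main(String[] args) {
    // A response map as an older server might produce it: no snapshot keys.
    Map<String, Object> m = new HashMap<>();
    m.put("length", 11111L);

    ContentSummary.Builder builder = new ContentSummary.Builder()
        .length(((Number) m.get("length")).longValue());
    // Same guarded pattern as JsonUtilClient: only set a field when present.
    if (m.get("snapshotLength") != null) {
      builder.snapshotLength(((Number) m.get("snapshotLength")).longValue());
    }
    // Prints the builder default (presumably 0) since the key was absent.
    System.out.println(builder.build().getSnapshotLength());
  }
}
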
+ 5 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java

@@ -358,6 +358,11 @@ public class JsonUtil {
     // For ContentSummary we don't need this since we already have
     // separate count for file and directory.
     m.putAll(toJsonMap(contentsummary, false));
+    m.put("snapshotLength", contentsummary.getSnapshotLength());
+    m.put("snapshotFileCount", contentsummary.getSnapshotFileCount());
+    m.put("snapshotDirectoryCount",
+        contentsummary.getSnapshotDirectoryCount());
+    m.put("snapshotSpaceConsumed", contentsummary.getSnapshotSpaceConsumed());
     return toJsonString(ContentSummary.class, m);
   }
 

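On the server side, the four counters are put into the map before serialization, so a GETCONTENTSUMMARY response now carries them alongside the existing fields. An illustration of the resulting shape, built from the same values the updated test below expects:

import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.hdfs.web.JsonUtil;

public class ContentSummaryJsonSketch {
  public static void main(String[] args) {
    ContentSummary summary = new ContentSummary.Builder()
        .length(11111).fileCount(22222).directoryCount(33333)
        .quota(44444).spaceConsumed(55555).spaceQuota(66666)
        .erasureCodingPolicy("RS-6-3-1024k")
        .snapshotLength(10).snapshotFileCount(2)
        .snapshotDirectoryCount(1).snapshotSpaceConsumed(30)
        .build();
    // Expected output, matching the test below:
    // {"ContentSummary":{"directoryCount":33333,"ecPolicy":"RS-6-3-1024k",
    //  "fileCount":22222,"length":11111,"quota":44444,
    //  "snapshotDirectoryCount":1,"snapshotFileCount":2,"snapshotLength":10,
    //  "snapshotSpaceConsumed":30,"spaceConsumed":55555,"spaceQuota":66666,
    //  "typeQuota":{}}}
    System.out.println(JsonUtil.toJsonString(summary));
  }
}
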
+ 20 - 7
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java

@@ -306,7 +306,13 @@ public class TestJsonUtil {
   @Test
   public void testToJsonFromContentSummary() {
     String jsonString =
-        "{\"ContentSummary\":{\"directoryCount\":33333,\"ecPolicy\":\"RS-6-3-1024k\",\"fileCount\":22222,\"length\":11111,\"quota\":44444,\"spaceConsumed\":55555,\"spaceQuota\":66666,\"typeQuota\":{}}}";
+        "{\"ContentSummary\":{\"directoryCount\":33333,\"ecPolicy\":"
+            + "\"RS-6-3-1024k\",\"fileCount\":22222,\"length\":11111,"
+            + "\"quota\":44444,\"snapshotDirectoryCount\":1,"
+            + "\"snapshotFileCount\":2,\"snapshotLength\":10,"
+            + "\"snapshotSpaceConsumed\":30,\"spaceConsumed\":55555,"
+            + "\"spaceQuota\":66666,\"typeQuota\":{}}}";
+
     long length = 11111;
     long fileCount = 22222;
     long directoryCount = 33333;
@@ -314,15 +320,22 @@ public class TestJsonUtil {
     long spaceConsumed = 55555;
     long spaceQuota = 66666;
     String ecPolicy = "RS-6-3-1024k";
-
-    ContentSummary contentSummary = new ContentSummary.Builder().length(length).
-        fileCount(fileCount).directoryCount(directoryCount).quota(quota).
-        spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).
-        erasureCodingPolicy(ecPolicy).build();
+    long snapshotLength = 10;
+    long snapshotFileCount = 2;
+    long snapshotDirectoryCount = 1;
+    long snapshotSpaceConsumed = 30;
+
+    ContentSummary contentSummary = new ContentSummary.Builder().length(length)
+        .fileCount(fileCount).directoryCount(directoryCount).quota(quota)
+        .spaceConsumed(spaceConsumed).spaceQuota(spaceQuota)
+        .erasureCodingPolicy(ecPolicy).snapshotLength(snapshotLength)
+        .snapshotFileCount(snapshotFileCount)
+        .snapshotDirectoryCount(snapshotDirectoryCount)
+        .snapshotSpaceConsumed(snapshotSpaceConsumed).build();
 
     Assert.assertEquals(jsonString, JsonUtil.toJsonString(contentSummary));
   }
-  
+
   @Test
   public void testToJsonFromXAttrs() throws IOException {
     String jsonString = 

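For completeness, this is how the new counters surface to an application once the client-side parsing above is in place. A hedged usage sketch: the filesystem URI and path are placeholders, while the getters are the ones exercised by this patch:

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SnapshotCountsClientSketch {
  public static void main(String[] args) throws Exception {
    // Assumed endpoint; replace with a real NameNode HTTP address.
    FileSystem fs = FileSystem.get(
        URI.create("webhdfs://namenode:9870"), new Configuration());
    ContentSummary cs = fs.getContentSummary(new Path("/dir"));
    System.out.println("snapshotLength=" + cs.getSnapshotLength());
    System.out.println("snapshotFileCount=" + cs.getSnapshotFileCount());
    System.out.println("snapshotDirectoryCount=" + cs.getSnapshotDirectoryCount());
    System.out.println("snapshotSpaceConsumed=" + cs.getSnapshotSpaceConsumed());
  }
}
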
+ 24 - 0
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java

@@ -994,6 +994,30 @@ public class TestWebHDFS {
         .assertTrue((contentSummary.getTypeQuota(StorageType.DISK) == 100000));
   }
 
+  /**
+   * Test Snapshot related information in ContentSummary.
+   */
+  @Test
+  public void testSnapshotInContentSummary() throws Exception {
+    final Configuration conf = WebHdfsTestUtil.createConf();
+    Path dirPath = new Path("/dir");
+    final Path filePath = new Path("/dir/file");
+    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
+    final WebHdfsFileSystem webHdfs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
+        WebHdfsConstants.WEBHDFS_SCHEME);
+    final DistributedFileSystem dfs = cluster.getFileSystem();
+    DFSTestUtil.createFile(dfs, filePath, 10, (short) 3, 0L);
+    dfs.allowSnapshot(dirPath);
+    dfs.createSnapshot(dirPath);
+    dfs.delete(filePath, true);
+    ContentSummary contentSummary = webHdfs.getContentSummary(dirPath);
+    assertEquals(1, contentSummary.getSnapshotFileCount());
+    assertEquals(10, contentSummary.getSnapshotLength());
+    assertEquals(30, contentSummary.getSnapshotSpaceConsumed());
+    assertEquals(dfs.getContentSummary(dirPath),
+        webHdfs.getContentSummary(dirPath));
+  }
+
   @Test
   public void testQuotaUsage() throws Exception {
     final Configuration conf = WebHdfsTestUtil.createConf();
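A note on the arithmetic behind the snapshot assertions in testSnapshotInContentSummary above: the file is created with length 10 and replication 3, then deleted, so only the snapshot still references it. A sketch of where the expected values come from:

// Mirrors the test setup: DFSTestUtil.createFile(dfs, filePath, 10, (short) 3, 0L)
long fileLength = 10;
short replication = 3;
long snapshotFileCount = 1;                            // the one deleted file
long snapshotLength = fileLength;                      // 10 bytes
long snapshotSpaceConsumed = fileLength * replication; // 10 * 3 = 30

The final assertEquals then checks that the summary computed over RPC by DistributedFileSystem and the one parsed from the WebHDFS JSON response agree field for field.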