
HDFS-4518. Finer grained metrics for HDFS capacity. Contributed by Arpit Agarwal.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-1@1451385 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas 12 years ago
parent
commit 9375bb9c37

+ 4 - 0
src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

@@ -6383,6 +6383,10 @@ public class FSNamesystem implements FSConstants, FSNamesystemMBean, FSClusterSt
                 roundBytesToGBytes(getCapacityUsed()))
       .addGauge("CapacityRemainingGB", "",
                 roundBytesToGBytes(getCapacityRemaining()))
+      .addGauge("CapacityTotal", "", getCapacityTotal())
+      .addGauge("CapacityUsed", "", getCapacityUsed())
+      .addGauge("CapacityRemaining", "", getCapacityRemaining())
+      .addGauge("CapacityUsedNonDFS", "", getNonDfsUsedSpace())
       .addGauge("TotalLoad", "", getTotalLoad())
       .addGauge("CorruptBlocks", "", getCorruptReplicaBlocks())
       .addGauge("ExcessBlocks", "", getExcessBlocks())

+ 17 - 0
src/test/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java

@@ -101,6 +101,23 @@ public class TestNameNodeMetrics extends TestCase {
     stm.close();
   }
 
+  /**
+   * Test that capacity metrics are exported and pass
+   * basic sanity tests.
+   */
+  public void testCapacityMetrics() throws Exception {
+    MetricsRecordBuilder rb = getMetrics(fsnMetrics);
+    long capacityTotal = getLongGauge("CapacityTotal", rb);
+    assert(capacityTotal != 0);
+    long capacityUsed = getLongGauge("CapacityUsed", rb);
+    long capacityRemaining =
+        getLongGauge("CapacityRemaining", rb);
+    long capacityUsedNonDFS =
+        getLongGauge("CapacityUsedNonDFS", rb);
+    assert(capacityUsed + capacityRemaining + capacityUsedNonDFS ==
+        capacityTotal);
+  }  
+
   /** Test metrics associated with addition of a file */
   public void testFileAdd() throws Exception {
     // Add files with 32 blocks
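
The new test encodes the invariant CapacityUsed + CapacityRemaining + CapacityUsedNonDFS == CapacityTotal. Below is a hedged sketch of checking the same gauges against a running NameNode over JMX; the service URL, port, and MBean ObjectName are assumptions and depend on how the NameNode's JMX agent and metrics are configured:

import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

public class CapacityJmxProbe {
  public static void main(String[] args) throws Exception {
    // Hypothetical JMX endpoint; requires the NameNode JVM to expose a
    // remote JMX connector on this host/port.
    JMXServiceURL url = new JMXServiceURL(
        "service:jmx:rmi:///jndi/rmi://namenode-host:8004/jmxrmi");
    JMXConnector jmxc = JMXConnectorFactory.connect(url);
    try {
      MBeanServerConnection conn = jmxc.getMBeanServerConnection();
      // Assumed MBean name; the exact domain/name varies across Hadoop versions.
      ObjectName fsns = new ObjectName(
          "hadoop:service=NameNode,name=FSNamesystemMetrics");
      long total     = (Long) conn.getAttribute(fsns, "CapacityTotal");
      long used      = (Long) conn.getAttribute(fsns, "CapacityUsed");
      long remaining = (Long) conn.getAttribute(fsns, "CapacityRemaining");
      long nonDfs    = (Long) conn.getAttribute(fsns, "CapacityUsedNonDFS");
      System.out.println("total=" + total + " used=" + used
          + " remaining=" + remaining + " nonDFS=" + nonDfs);
      // Same sanity check as testCapacityMetrics above.
      if (used + remaining + nonDfs != total) {
        System.err.println("Capacity gauges do not add up!");
      }
    } finally {
      jmxc.close();
    }
  }
}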

+ 26 - 0
src/test/org/apache/hadoop/test/MetricsAsserts.java

@@ -21,6 +21,9 @@ package org.apache.hadoop.test;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import static org.mockito.Mockito.*;
+
+import org.junit.Assert;
+import org.mockito.ArgumentCaptor;
 import org.mockito.stubbing.Answer;
 import org.mockito.invocation.InvocationOnMock;
 import static org.mockito.AdditionalMatchers.*;
@@ -173,6 +176,29 @@ public class MetricsAsserts {
     assertCounterGt(name, greater, getMetrics(source));
   }
 
+  /**
+   * Check that this metric was captured exactly once.
+   */
+  private static void checkCaptured(ArgumentCaptor<?> captor, String name) {
+    Assert.assertEquals("Expected exactly one metric for name " + name,
+        1, captor.getAllValues().size());
+  }
+
+  /**
+   * Lookup the value of a long metric by name. Throws exception if the
+   * metric could not be found.
+   *
+   * @param name of the metric.
+   * @param rb  the record builder mock used to getMetrics
+   * @return the long value of the metric if found.
+   */
+  public static long getLongGauge(String name, MetricsRecordBuilder rb) {
+    ArgumentCaptor<Long> captor = ArgumentCaptor.forClass(Long.class);
+    verify(rb, atLeast(0)).addGauge(eq(name), anyString(), captor.capture());
+    checkCaptured(captor, name);
+    return captor.getValue();
+  }
+
   /**
    * Assert that a double gauge metric is greater than a value
    * @param name  of the metric
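
The added getLongGauge helper relies on Mockito's ArgumentCaptor: verify() with atLeast(0) matches the recorded addGauge call (if any) while the captor grabs the value passed for that gauge name, and checkCaptured then reports a clear failure when no such call was seen. A self-contained sketch of that pattern, with a hypothetical Recorder interface standing in for MetricsRecordBuilder:

import static org.mockito.Mockito.*;
import org.mockito.ArgumentCaptor;

public class CaptorDemo {
  // Hypothetical stand-in for MetricsRecordBuilder.
  public interface Recorder {
    void addGauge(String name, String desc, long value);
  }

  public static void main(String[] args) {
    Recorder rb = mock(Recorder.class);
    rb.addGauge("CapacityTotal", "", 42L);   // the interaction to inspect

    ArgumentCaptor<Long> captor = ArgumentCaptor.forClass(Long.class);
    // atLeast(0) keeps verify() from failing outright when the gauge was never
    // emitted; a later check on the captured count gives a clearer message.
    verify(rb, atLeast(0)).addGauge(eq("CapacityTotal"), anyString(), captor.capture());
    System.out.println(captor.getAllValues().size()); // 1
    System.out.println(captor.getValue());            // 42
  }
}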