Browse Source

HDDS-1568: Add RocksDB metrics to OM. Contributed by Aravindan Vijayan.

avijayanhwx 6 years ago
parent
commit
33419a980a

+ 2 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStore.java

@@ -75,7 +75,8 @@ public class RocksDBStore implements MetadataStore {
         jmxProperties.put("dbName", dbFile.getName());
         statMBeanName = HddsUtils.registerWithJmxProperties(
             "Ozone", "RocksDbStore", jmxProperties,
-            new RocksDBStoreMBean(dbOptions.statistics()));
+            RocksDBStoreMBean.create(dbOptions.statistics(),
+                dbFile.getName()));
         if (statMBeanName == null) {
           LOG.warn("jmx registration failed during RocksDB init, db path :{}",
               dbFile.getAbsolutePath());

+ 77 - 2
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStoreMBean.java

@@ -18,10 +18,18 @@
 
 package org.apache.hadoop.utils;
 
+import org.apache.hadoop.metrics2.MetricsCollector;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.metrics2.MetricsSource;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.Interns;
 import org.rocksdb.HistogramData;
 import org.rocksdb.HistogramType;
 import org.rocksdb.Statistics;
 import org.rocksdb.TickerType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.management.Attribute;
 import javax.management.AttributeList;
@@ -41,13 +49,21 @@ import java.util.Set;
 /**
  * Adapter JMX bean to publish all the Rocksdb metrics.
  */
-public class RocksDBStoreMBean implements DynamicMBean {
+public class RocksDBStoreMBean implements DynamicMBean, MetricsSource {
 
   private Statistics statistics;
 
   private Set<String> histogramAttributes = new HashSet<>();
 
-  public RocksDBStoreMBean(Statistics statistics) {
+  private String contextName;
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(RocksDBStoreMBean.class);
+
+  public final static String ROCKSDB_CONTEXT_PREFIX = "Rocksdb_";
+
+  public RocksDBStoreMBean(Statistics statistics, String dbName) {
+    this.contextName = ROCKSDB_CONTEXT_PREFIX + dbName;
     this.statistics = statistics;
     histogramAttributes.add("Average");
     histogramAttributes.add("Median");
@@ -56,6 +72,22 @@ public class RocksDBStoreMBean implements DynamicMBean {
     histogramAttributes.add("StandardDeviation");
   }
 
+  /**
+   * Creates a RocksDBStoreMBean and registers it as a source with the
+   * default metrics system under "Rocksdb_&lt;dbName&gt;". If a source with
+   * that name is already registered, the existing instance is returned
+   * instead of registering a duplicate.
+   * NOTE(review): the getSource/register pair is not atomic — two threads
+   * creating stores for the same dbName could race; confirm callers only
+   * invoke this during single-threaded initialization.
+   * @param statistics RocksDB statistics handle to expose as metrics.
+   * @param contextName database name used to build the source name.
+   * @return the registered (new or pre-existing) RocksDBStoreMBean.
+   */
+  public static RocksDBStoreMBean create(Statistics statistics,
+                                         String contextName) {
+
+    RocksDBStoreMBean rocksDBStoreMBean = new RocksDBStoreMBean(
+        statistics, contextName);
+    MetricsSystem ms = DefaultMetricsSystem.instance();
+    MetricsSource metricsSource = ms.getSource(rocksDBStoreMBean.contextName);
+    if (metricsSource != null) {
+      return (RocksDBStoreMBean)metricsSource;
+    } else {
+      return ms.register(rocksDBStoreMBean.contextName,
+          "RocksDB Metrics",
+          rocksDBStoreMBean);
+    }
+  }
+
+
   @Override
   public Object getAttribute(String attribute)
       throws AttributeNotFoundException, MBeanException, ReflectionException {
@@ -141,4 +173,47 @@ public class RocksDBStoreMBean implements DynamicMBean {
         attributes.toArray(new MBeanAttributeInfo[0]), null, null, null);
 
   }
+
+  /**
+   * MetricsSource callback: snapshots every RocksDB histogram and ticker
+   * statistic into a single record named after this store's context
+   * ("Rocksdb_&lt;dbName&gt;").
+   * @param metricsCollector collector that receives the record.
+   * @param b "all metrics" flag from the metrics system; ignored here since
+   *          a full snapshot is always taken.
+   */
+  @Override
+  public void getMetrics(MetricsCollector metricsCollector, boolean b) {
+    MetricsRecordBuilder rb = metricsCollector.addRecord(contextName);
+    getHistogramData(rb);
+    getTickerTypeData(rb);
+  }
+
+  /**
+   * Collect all histogram metrics from RocksDB statistics.
+   * @param rb Metrics Record Builder.
+   */
+  private void getHistogramData(MetricsRecordBuilder rb) {
+    for (HistogramType histogramType : HistogramType.values()) {
+      // The enum constant is the lookup key itself; no need to round-trip
+      // through valueOf(name()).
+      HistogramData histogram = statistics.getHistogramData(histogramType);
+      for (String histogramAttribute : histogramAttributes) {
+        try {
+          // Reflectively read getAverage()/getMedian()/... for each
+          // configured histogram attribute.
+          Method method =
+              HistogramData.class.getMethod("get" + histogramAttribute);
+          double metricValue = (double) method.invoke(histogram);
+          rb.addGauge(Interns.info(histogramType.name() + "_" +
+                  histogramAttribute.toUpperCase(), "RocksDBStat"),
+              metricValue);
+        } catch (Exception e) {
+          // Pass the exception as the trailing throwable argument so SLF4J
+          // logs the stack trace; a lone "{}" placeholder with only a
+          // Throwable argument is never substituted.
+          LOG.error("Error reading histogram data for {}", histogramType, e);
+        }
+      }
+    }
+  }
+
+  /**
+   * Collect all Counter metrics from RocksDB statistics.
+   * @param rb Metrics Record Builder.
+   */
+  private void getTickerTypeData(MetricsRecordBuilder rb) {
+    for (TickerType tickerType : TickerType.values()) {
+      // Each RocksDB ticker is a monotonically increasing count, so it is
+      // published as a counter rather than a gauge.
+      rb.addCounter(Interns.info(tickerType.name(), "RocksDBStat"),
+          statistics.getTickerCount(tickerType));
+    }
+  }
+
 }

+ 16 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/DBStoreBuilder.java

@@ -28,6 +28,8 @@ import org.rocksdb.ColumnFamilyDescriptor;
 import org.rocksdb.ColumnFamilyOptions;
 import org.rocksdb.DBOptions;
 import org.rocksdb.RocksDB;
+import org.rocksdb.Statistics;
+import org.rocksdb.StatsLevel;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -42,6 +44,9 @@ import java.util.Set;
 
 import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DB_PROFILE;
 import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DEFAULT_DB_PROFILE;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_STORE_ROCKSDB_STATISTICS;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_STORE_ROCKSDB_STATISTICS_DEFAULT;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_STORE_ROCKSDB_STATISTICS_OFF;
 
 /**
  * DBStore Builder.
@@ -57,12 +62,16 @@ public final class DBStoreBuilder {
   private List<String> tableNames;
   private Configuration configuration;
   private CodecRegistry registry;
+  private String rocksDbStat;
 
   private DBStoreBuilder(Configuration configuration) {
     tables = new HashSet<>();
     tableNames = new LinkedList<>();
     this.configuration = configuration;
     this.registry = new CodecRegistry();
+    // Resolve the configured RocksDB statistics level once at build time;
+    // the value "OFF" disables statistics collection entirely when the
+    // DBOptions are assembled.
+    this.rocksDbStat = configuration.getTrimmed(
+        OZONE_METADATA_STORE_ROCKSDB_STATISTICS,
+        OZONE_METADATA_STORE_ROCKSDB_STATISTICS_DEFAULT);
   }
 
   public static DBStoreBuilder newBuilder(Configuration configuration) {
@@ -187,7 +196,13 @@ public final class DBStoreBuilder {
 
     if (option == null) {
       LOG.info("Using default options. {}", dbProfile.toString());
-      return dbProfile.getDBOptions();
+      option = dbProfile.getDBOptions();
+    }
+
+    if (!rocksDbStat.equals(OZONE_METADATA_STORE_ROCKSDB_STATISTICS_OFF)) {
+      Statistics statistics = new Statistics();
+      statistics.setStatsLevel(StatsLevel.valueOf(rocksDbStat));
+      option = option.setStatistics(statistics);
     }
     return option;
   }

+ 2 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/RDBStore.java

@@ -108,7 +108,8 @@ public class RDBStore implements DBStore {
         jmxProperties.put("dbName", dbFile.getName());
         statMBeanName = HddsUtils.registerWithJmxProperties(
             "Ozone", "RocksDbStore", jmxProperties,
-            new RocksDBStoreMBean(dbOptions.statistics()));
+            RocksDBStoreMBean.create(dbOptions.statistics(),
+                dbFile.getName()));
         if (statMBeanName == null) {
           LOG.warn("jmx registration failed during RocksDB init, db path :{}",
               dbFile.getAbsolutePath());

+ 151 - 13
hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestRocksDBStoreMBean.java

@@ -19,6 +19,14 @@ package org.apache.hadoop.utils;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.metrics2.AbstractMetric;
+import org.apache.hadoop.metrics2.MetricsCollector;
+import org.apache.hadoop.metrics2.MetricsInfo;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.metrics2.MetricsSource;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.MetricsTag;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Assert;
@@ -27,9 +35,14 @@ import org.junit.Test;
 
 import javax.management.MBeanServer;
 import java.io.File;
+import java.io.IOException;
 import java.lang.management.ManagementFactory;
+import java.util.HashMap;
+import java.util.Map;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Test the JMX interface for the rocksdb metastore implementation.
@@ -49,18 +62,8 @@ public class TestRocksDBStoreMBean {
 
   @Test
   public void testJmxBeans() throws Exception {
-    File testDir =
-        GenericTestUtils.getTestDir(getClass().getSimpleName() + "-withstat");
 
-    conf.set(OzoneConfigKeys.OZONE_METADATA_STORE_ROCKSDB_STATISTICS, "ALL");
-
-    RocksDBStore metadataStore =
-        (RocksDBStore) MetadataStoreBuilder.newBuilder().setConf(conf)
-            .setCreateIfMissing(true).setDbFile(testDir).build();
-
-    for (int i = 0; i < 10; i++) {
-      metadataStore.put("key".getBytes(UTF_8), "value".getBytes(UTF_8));
-    }
+    RocksDBStore metadataStore = getTestRocksDBStoreWithData();
 
     MBeanServer platformMBeanServer =
         ManagementFactory.getPlatformMBeanServer();
@@ -69,11 +72,11 @@ public class TestRocksDBStoreMBean {
     Object keysWritten = platformMBeanServer
         .getAttribute(metadataStore.getStatMBeanName(), "NUMBER_KEYS_WRITTEN");
 
-    Assert.assertEquals(10L, keysWritten);
+    assertEquals(10L, keysWritten);
 
     Object dbWriteAverage = platformMBeanServer
         .getAttribute(metadataStore.getStatMBeanName(), "DB_WRITE_AVERAGE");
-    Assert.assertTrue((double) dbWriteAverage > 0);
+    assertTrue((double) dbWriteAverage > 0);
 
     metadataStore.close();
 
@@ -93,4 +96,139 @@ public class TestRocksDBStoreMBean {
 
     Assert.assertNull(metadataStore.getStatMBeanName());
   }
+
+  /**
+   * Verifies that a statistics-enabled store registers itself as a metrics
+   * source and publishes RocksDB counters/gauges through getMetrics().
+   */
+  @Test
+  public void testMetricsSystemIntegration() throws Exception {
+
+    RocksDBStore metadataStore = getTestRocksDBStoreWithData();
+    try {
+      // NOTE(review): presumably allows the metrics system to settle before
+      // the source lookup — confirm whether this wait is actually required,
+      // since getMetrics() is invoked on the source directly below.
+      Thread.sleep(2000);
+
+      MetricsSystem ms = DefaultMetricsSystem.instance();
+      MetricsSource rdbSource =
+          ms.getSource("Rocksdb_TestRocksDBStoreMBean-withstat");
+
+      BufferedMetricsCollector metricsCollector =
+          new BufferedMetricsCollector();
+      rdbSource.getMetrics(metricsCollector, true);
+
+      Map<String, Double> metrics = metricsCollector.getMetricsRecordBuilder()
+          .getMetrics();
+      // Ten puts are issued by getTestRocksDBStoreWithData().
+      assertEquals(10.0, metrics.get("NUMBER_KEYS_WRITTEN"), 0.0);
+      assertTrue(metrics.get("DB_WRITE_AVERAGE") > 0);
+    } finally {
+      // Close even when an assertion fails so the db handle is released.
+      metadataStore.close();
+    }
+  }
+
+  /**
+   * Creates a RocksDBStore with statistics level ALL and performs ten puts
+   * (same key each time) so ticker and histogram statistics are non-zero.
+   * @return an open store; the caller is responsible for closing it.
+   * @throws IOException if the store cannot be created.
+   */
+  private RocksDBStore getTestRocksDBStoreWithData() throws IOException {
+    File testDir =
+        GenericTestUtils.getTestDir(getClass().getSimpleName() + "-withstat");
+
+    conf.set(OzoneConfigKeys.OZONE_METADATA_STORE_ROCKSDB_STATISTICS, "ALL");
+
+    RocksDBStore metadataStore =
+        (RocksDBStore) MetadataStoreBuilder.newBuilder().setConf(conf)
+            .setCreateIfMissing(true).setDbFile(testDir).build();
+
+    for (int i = 0; i < 10; i++) {
+      metadataStore.put("key".getBytes(UTF_8), "value".getBytes(UTF_8));
+    }
+
+    return metadataStore;
+  }
+}
+}
+
+/**
+ * Test class to buffer a single MetricsRecordBuilder instance.
+ */
+class BufferedMetricsCollector implements MetricsCollector {
+
+  private BufferedMetricsRecordBuilderImpl metricsRecordBuilder;
+
+  BufferedMetricsCollector() {
+    metricsRecordBuilder = new BufferedMetricsRecordBuilderImpl();
+  }
+
+  public BufferedMetricsRecordBuilderImpl getMetricsRecordBuilder() {
+    return metricsRecordBuilder;
+  }
+
+  @Override
+  public MetricsRecordBuilder addRecord(String s) {
+    metricsRecordBuilder.setContext(s);
+    return metricsRecordBuilder;
+  }
+
+  @Override
+  public MetricsRecordBuilder addRecord(MetricsInfo metricsInfo) {
+    return metricsRecordBuilder;
+  }
+
+  /**
+   * Test class to buffer a single snapshot of metrics.
+   */
+  class BufferedMetricsRecordBuilderImpl extends MetricsRecordBuilder {
+
+    private Map<String, Double> metrics = new HashMap<>();
+    private String contextName;
+
+    public Map<String, Double> getMetrics() {
+      return metrics;
+    }
+
+    @Override
+    public MetricsRecordBuilder tag(MetricsInfo metricsInfo, String s) {
+      return null;
+    }
+
+    @Override
+    public MetricsRecordBuilder add(MetricsTag metricsTag) {
+      return null;
+    }
+
+    @Override
+    public MetricsRecordBuilder add(AbstractMetric abstractMetric) {
+      return null;
+    }
+
+    @Override
+    public MetricsRecordBuilder setContext(String s) {
+      this.contextName = s;
+      return this;
+    }
+
+    @Override
+    public MetricsRecordBuilder addCounter(MetricsInfo metricsInfo, int i) {
+      return null;
+    }
+
+    @Override
+    public MetricsRecordBuilder addCounter(MetricsInfo metricsInfo, long l) {
+      metrics.put(metricsInfo.name(), (double)l);
+      return this;
+    }
+
+    @Override
+    public MetricsRecordBuilder addGauge(MetricsInfo metricsInfo, int i) {
+      return null;
+    }
+
+    @Override
+    public MetricsRecordBuilder addGauge(MetricsInfo metricsInfo, long l) {
+      return null;
+    }
+
+    @Override
+    public MetricsRecordBuilder addGauge(MetricsInfo metricsInfo, float v) {
+      return null;
+    }
+
+    @Override
+    public MetricsRecordBuilder addGauge(MetricsInfo metricsInfo, double v) {
+      metrics.put(metricsInfo.name(), v);
+      return this;
+    }
+
+    @Override
+    public MetricsCollector parent() {
+      return null;
+    }
+  }
 }

+ 9 - 0
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/PrometheusMetricsSink.java

@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hdds.server;
 
+import static org.apache.hadoop.utils.RocksDBStoreMBean.ROCKSDB_CONTEXT_PREFIX;
+
 import java.io.IOException;
 import java.io.Writer;
 import java.util.HashMap;
@@ -24,6 +26,7 @@ import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricType;
 import org.apache.hadoop.metrics2.MetricsRecord;
@@ -90,6 +93,12 @@ public class PrometheusMetricsSink implements MetricsSink {
    */
   public String prometheusName(String recordName,
       String metricName) {
+
+    //RocksDB metric names already have underscores as delimiters.
+    if (StringUtils.isNotEmpty(recordName) &&
+        recordName.startsWith(ROCKSDB_CONTEXT_PREFIX)) {
+      return recordName.toLowerCase() + "_" + metricName.toLowerCase();
+    }
     String baseName = upperFirst(recordName) + upperFirst(metricName);
     Matcher m = UPPER_CASE_SEQ.matcher(baseName);
     StringBuffer sb = new StringBuffer();

+ 5 - 0
hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/TestPrometheusMetricsSink.java

@@ -82,6 +82,11 @@ public class TestPrometheusMetricsSink {
 
     Assert.assertEquals("rpc_time_small",
         sink.prometheusName("RpcTime", "small"));
+
+    //RocksDB metrics are handled differently.
+
+    Assert.assertEquals("rocksdb_om.db_num_open_connections",
+        sink.prometheusName("Rocksdb_om.db", "num_open_connections"));
   }
 
   /**