瀏覽代碼

AMBARI-851. Hook up Ganglia property provider. (Tom Beerbower via mahadev)

git-svn-id: https://svn.apache.org/repos/asf/incubator/ambari/branches/AMBARI-666@1398615 13f79535-47bb-0310-9956-ffa450edef68
Mahadev Konar 12 年之前
父節點
當前提交
b8a44cbe2c
共有 31 個文件被更改,包括 1816 次插入和 415 次刪除
  1. 2 0
      AMBARI-666-CHANGES.txt
  2. 18 2
      ambari-agent/src/test/python/TestHardware.py
  3. 0 91
      ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/GangliaHelper.java
  4. 31 1
      ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/GangliaMetric.java
  5. 127 67
      ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/GangliaPropertyProvider.java
  6. 788 0
      ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/MetricsMapping.java
  7. 33 7
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
  8. 148 13
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
  9. 19 1
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestImpl.java
  10. 50 31
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ResourceProviderImpl.java
  11. 4 10
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/SchemaImpl.java
  12. 140 8
      ambari-server/src/main/java/org/apache/ambari/server/controller/jdbc/JDBCProviderModule.java
  13. 7 32
      ambari-server/src/main/java/org/apache/ambari/server/controller/jdbc/JDBCResourceProvider.java
  14. 1 1
      ambari-server/src/main/java/org/apache/ambari/server/controller/jmx/JMXMetricHolder.java
  15. 21 25
      ambari-server/src/main/java/org/apache/ambari/server/controller/jmx/JMXPropertyProvider.java
  16. 4 0
      ambari-server/src/main/java/org/apache/ambari/server/controller/spi/ProviderModule.java
  17. 3 3
      ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Request.java
  18. 2 16
      ambari-server/src/main/java/org/apache/ambari/server/controller/spi/ResourceProvider.java
  19. 19 3
      ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/PropertyHelper.java
  20. 25 0
      ambari-server/src/main/resources/properties.json
  21. 1 1
      ambari-server/src/test/java/org/apache/ambari/server/controller/ganglia/GangliaHelperTest.java
  22. 36 15
      ambari-server/src/test/java/org/apache/ambari/server/controller/ganglia/GangliaPropertyProviderTest.java
  23. 18 7
      ambari-server/src/test/java/org/apache/ambari/server/controller/ganglia/TestStreamProvider.java
  24. 12 21
      ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterControllerImplTest.java
  25. 0 9
      ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ResourceProviderImplTest.java
  26. 4 9
      ambari-server/src/test/java/org/apache/ambari/server/controller/internal/SchemaImplTest.java
  27. 36 21
      ambari-server/src/test/java/org/apache/ambari/server/controller/internal/TestProviderModule.java
  28. 2 9
      ambari-server/src/test/java/org/apache/ambari/server/controller/jdbc/TestJDBCResourceProvider.java
  29. 9 9
      ambari-server/src/test/java/org/apache/ambari/server/controller/jmx/JMXPropertyProviderTest.java
  30. 4 3
      ambari-server/src/test/java/org/apache/ambari/server/controller/jmx/TestHostMappingProvider.java
  31. 252 0
      ambari-server/src/test/resources/temporal_ganglia.json

+ 2 - 0
AMBARI-666-CHANGES.txt

@@ -12,6 +12,8 @@ AMBARI-666 branch (unreleased changes)
 
   NEW FEATURES
 
+  AMBARI-851. Hook up Ganglia property provider. (Tom Beerbower via mahadev)
+
   AMBARI-863. Fix mvn tests to be able to run the python tests cleanly.
   (mahadev)
 

+ 18 - 2
ambari-agent/src/test/python/TestHardware.py

@@ -25,6 +25,22 @@ class TestHardware(TestCase):
   def test_build(self):
     hardware = Hardware()
     result = hardware.get()
-    self.assertTrue(result['coreCount'] >= 1)
-    self.assertTrue(result['netSpeed'] != None)
+    osdisks = hardware.osdisks()
+    for dev_item in result['mounts']:
+      self.assertTrue(dev_item['available'] >= 0)
+      self.assertTrue(dev_item['used'] >= 0)
+      self.assertTrue(dev_item['percent'] != None)
+      self.assertTrue(dev_item['device'] != None)
+      self.assertTrue(dev_item['mountpoint'] != None)
+      self.assertTrue(dev_item['size'] > 0)
 
+    for os_disk_item in osdisks:
+      self.assertTrue(os_disk_item['available'] >= 0)
+      self.assertTrue(os_disk_item['used'] >= 0)
+      self.assertTrue(os_disk_item['percent'] != None)
+      self.assertTrue(os_disk_item['device'] != None)
+      self.assertTrue(os_disk_item['mountpoint'] != None)
+      self.assertTrue(os_disk_item['size'] > 0)
+
+    self.assertTrue(len(result['mounts']) == len(osdisks))
+    

+ 0 - 91
ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/GangliaHelper.java

@@ -1,91 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.controller.ganglia;
-
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.type.TypeReference;
-
-import java.io.IOException;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.*;
-
-/**
- */
-public class GangliaHelper {
-
-  public static List<GangliaMetric> getGangliaProperty(String target,
-                                                       String gangliaCluster,
-                                                       String host,
-                                                       String metric) {
-    String s = "http://" +
-        target +
-        "/ganglia/graph.php" +
-        "?c=" + gangliaCluster +
-        "&h=" + (host == null ? "" : host) +
-        "&m=" + metric +
-        "&json=1";
-
-    try {
-      URLConnection connection = new URL(s).openConnection();
-
-      connection.setDoOutput(true);
-
-      return new ObjectMapper().readValue(connection.getInputStream(),
-          new TypeReference<List<GangliaMetric>>() {
-          });
-
-    } catch (IOException e) {
-      throw new IllegalStateException("Can't get metric " + metric + ".", e);
-    }
-  }
-
-
-  public static List<GangliaMetric> getGangliaMetrics(String target,
-                                                      String gangliaCluster,
-                                                      String host,
-                                                      String metric,
-                                                      Date startTime,
-                                                      Date endTime,
-                                                      long step) {
-    String s = "http://" +
-        target +
-        "/ganglia/graph.php" +
-        "?c=" + gangliaCluster +
-        "&h=" + (host == null ? "" : host) +
-        "&m=" + metric +
-        "&cs=" + startTime.getTime() / 1000 +
-        "&ce=" + endTime.getTime() / 1000 +
-        "&step=" + step +
-        "&json=1";
-
-    try {
-      URLConnection connection = new URL(s).openConnection();
-
-      connection.setDoOutput(true);
-
-      return new ObjectMapper().readValue(connection.getInputStream(),
-          new TypeReference<List<GangliaMetric>>() {
-          });
-
-    } catch (IOException e) {
-      throw new IllegalStateException("Can't get metric " + metric + ".", e);
-    }
-  }
-}

+ 31 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/GangliaMetric.java

@@ -20,18 +20,45 @@ package org.apache.ambari.server.controller.ganglia;
 
 
 /**
- *
+ * Data structure for temporal data returned from Ganglia Web.
  */
 public class GangliaMetric {
 
+  // Note that the member names correspond to the names in the JSON returned from Ganglia Web.
+
+  /**
+   * The name.
+   */
   private String ds_name;
+
+  /**
+   * The ganglia cluster name.
+   */
   private String cluster_name;
+
+  /**
+   * The graph type.
+   */
   private String graph_type;
+
+  /**
+   * The host name.
+   */
   private String host_name;
+
+  /**
+   * The metric name.
+   */
   private String metric_name;
 
+  /**
+   * The temporal data points.
+   */
   private double[][] datapoints;
 
+
+  // ----- GangliaMetric -----------------------------------------------------
+
   public String getDs_name() {
     return ds_name;
   }
@@ -80,6 +107,9 @@ public class GangliaMetric {
     this.datapoints = datapoints;
   }
 
+
+  // ----- Object overrides --------------------------------------------------
+
   @Override
   public String toString() {
     StringBuilder stringBuilder = new StringBuilder();

+ 127 - 67
ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/GangliaPropertyProvider.java

@@ -18,15 +18,18 @@
 
 package org.apache.ambari.server.controller.ganglia;
 
-import org.apache.ambari.server.controller.internal.PropertyIdImpl;
+import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.PropertyId;
 import org.apache.ambari.server.controller.spi.PropertyProvider;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.Resource;
-import org.apache.ambari.server.controller.utilities.PredicateHelper;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.controller.utilities.StreamProvider;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.type.TypeReference;
 
+import java.io.IOException;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -38,58 +41,47 @@ import java.util.Set;
  */
 public class GangliaPropertyProvider implements PropertyProvider {
 
-  /**
-   * Map of property ids supported by this provider.
-   */
-  private static final Set<PropertyId> PROPERTY_IDS = new HashSet<PropertyId>();
+  protected static final PropertyId HOST_COMPONENT_HOST_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("host_name", "HostRoles");
+  protected static final PropertyId HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("component_name", "HostRoles");
 
-  static {
-    PROPERTY_IDS.add(new PropertyIdImpl("cpu_nice", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("cpu_nice", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("cpu_wio", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("cpu_user", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("cpu_idle", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("cpu_system", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("cpu_aidle", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("disk_free", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("disk_total", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("part_max_used", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("load_one", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("load_five", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("load_fifteen", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("swap_free", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("mem_cached", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("mem_free", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("mem_buffers", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("mem_shared", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("bytes_out", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("bytes_in", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("pkts_in", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("pkts_out", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("proc_run", null, true));
-    PROPERTY_IDS.add(new PropertyIdImpl("proc_total", null, true));
-  }
 
   /**
-   * The Ganglia source.
-   * //TODO : where do we get this from?
+   * Set of property ids supported by this provider.
    */
-  private static final String GANGLIA_SOURCE = "ec2-107-22-86-120.compute-1.amazonaws.com";
+  private final Set<PropertyId> propertyIds;
 
+  private final StreamProvider streamProvider;
+
+  private final String gangliaCollectorHostName;
 
   /**
    * Map of Ganglia cluster names keyed by component type.
    */
-  private static final Map<String, String> COMPONENT_MAP = new HashMap<String, String>();
+  private static final Map<String, String> GANGLIA_CLUSTER_NAMES = new HashMap<String, String>();
 
   static {
-    COMPONENT_MAP.put("NAMENODE", "HDPNameNode");
-    COMPONENT_MAP.put("JOBTRACKER", "HDPJobTracker");
-//        COMPONENT_MAP.put("???", "HDPSlaves");
+    GANGLIA_CLUSTER_NAMES.put("NAMENODE",    "HDPNameNode");
+    GANGLIA_CLUSTER_NAMES.put("JOBTRACKER",  "HDPJobTracker");
+    GANGLIA_CLUSTER_NAMES.put("DATANODE",    "HDPSlaves");
+    GANGLIA_CLUSTER_NAMES.put("TASKTRACKER", "HDPSlaves");
+  }
+
+
+  // ----- Constructors ------------------------------------------------------
+
+  public GangliaPropertyProvider(Set<PropertyId> propertyIds,
+                                 StreamProvider streamProvider,
+                                 String gangliaCollectorHostName) {
+    this.propertyIds              = propertyIds;
+    this.streamProvider           = streamProvider;
+    this.gangliaCollectorHostName = gangliaCollectorHostName;
   }
 
+
+  // ----- PropertyProvider --------------------------------------------------
+
   @Override
-  public Set<Resource> populateResources(Set<Resource> resources, Request request, Predicate predicate) {
+  public Set<Resource> populateResources(Set<Resource> resources, Request request, Predicate predicate) throws AmbariException{
     Set<Resource> keepers = new HashSet<Resource>();
     for (Resource resource : resources) {
       if (populateResource(resource, request, predicate)) {
@@ -101,49 +93,72 @@ public class GangliaPropertyProvider implements PropertyProvider {
 
   @Override
   public Set<PropertyId> getPropertyIds() {
-    return PROPERTY_IDS;
+    return propertyIds;
   }
 
-  private boolean populateResource(Resource resource, Request request, Predicate predicate) {
 
-    Set<PropertyId> ids = new HashSet<PropertyId>(request.getPropertyIds());
-    if (ids == null || ids.isEmpty()) {
-      ids = getPropertyIds();
-    } else {
-      if (predicate != null) {
-        ids.addAll(PredicateHelper.getPropertyIds(predicate));
-      }
-      ids.retainAll(getPropertyIds());
-    }
+  // ----- helper methods ----------------------------------------------------
 
-    String host = resource.getPropertyValue(new PropertyIdImpl("host_name", "HostRoles", false));
-    String component = resource.getPropertyValue(new PropertyIdImpl("component_name", "HostRoles", false));
+  /**
+   * Populate a resource by obtaining the requested Ganglia metrics.
+   *
+   * @param resource  the resource to be populated
+   * @param request   the request
+   * @param predicate the predicate
+   *
+   * @return true if the resource was successfully populated with the requested properties
+   *
+   * @throws AmbariException thrown if the resource cannot be populated
+   */
+  private boolean populateResource(Resource resource, Request request, Predicate predicate) throws AmbariException{
 
-    // ---- TODO : HACK to fix host name that's been made all lower case... Ganglia doesn't like!!
-    host = PropertyHelper.fixHostName(host);
-    // -----
+    if (getPropertyIds().isEmpty()) {
+      return true;
+    }
+    Set<PropertyId> ids = PropertyHelper.getRequestPropertyIds(getPropertyIds(), request, predicate);
 
-    String cluster = COMPONENT_MAP.get(component);
+    String hostName           = PropertyHelper.fixHostName(resource.getPropertyValue(HOST_COMPONENT_HOST_NAME_PROPERTY_ID));
+    String componentName      = resource.getPropertyValue(HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID);
+    String gangliaClusterName = GANGLIA_CLUSTER_NAMES.get(componentName);
 
-    if (cluster == null) {
+    if (gangliaClusterName == null) {
       return true;
     }
 
     for (PropertyId propertyId : ids) {
 
-      String property = (propertyId.getCategory() == null ? "" : propertyId.getCategory() + ".") +
-          propertyId.getName();
-
-      List<GangliaMetric> properties = GangliaHelper.getGangliaProperty(GANGLIA_SOURCE, cluster, host, property);
-
-      double[][] dataPoints = properties.get(0).getDatapoints();
-
-      resource.setProperty(propertyId, getTemporalValue(dataPoints));
+// TODO : ignoring category for now..
+//      String category = propertyId.getCategory();
+//      String property = (category == null || category.length() == 0 ? "" : category + ".") +
+//          propertyId.getName();
+      String property = propertyId.getName();
+
+      Request.TemporalInfo temporalInfo = request.getTemporalInfo(propertyId);
+      String spec = getSpec(gangliaClusterName, hostName, property,
+          temporalInfo.getStartTime(), temporalInfo.getEndTime(), temporalInfo.getStep());
+
+      try {
+        List<GangliaMetric> properties = new ObjectMapper().readValue(streamProvider.readFrom(spec),
+            new TypeReference<List<GangliaMetric>>() {
+        });
+        resource.setProperty(propertyId, getTemporalValue(properties.get(0)));
+      } catch (IOException e) {
+        throw new AmbariException("Can't get metrics : " + property, e);
+      }
     }
     return true;
   }
 
-  private String getTemporalValue(double[][] dataPoints) {
+  /**
+   * Get a string representation of the temporal data from the given metric.
+   *
+   * @param metric  the metric
+   *
+   * @return the string representation of the temporal data
+   */
+  private String getTemporalValue(GangliaMetric metric) {
+    double[][] dataPoints = metric.getDatapoints();
+
     boolean first = true;
     StringBuilder stringBuilder = new StringBuilder();
     stringBuilder.append("[");
@@ -161,5 +176,50 @@ public class GangliaPropertyProvider implements PropertyProvider {
     stringBuilder.append("]");
     return stringBuilder.toString();
   }
+
+  /**
+   * Get the spec to locate the Ganglia stream from the given
+   * request info.
+   *
+   * @param gangliaCluster  the ganglia cluster name
+   * @param host            the host name
+   * @param metric          the metric
+   * @param startTime       the start time of the temporal data
+   * @param endTime         the end time of the temporal data
+   * @param step            the step for the temporal data
+   *
+   * @return the spec
+   */
+  protected String getSpec(String gangliaCluster,
+                                  String host,
+                                  String metric,
+                                  Long startTime,
+                                  Long endTime,
+                                  Long step) {
+
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("http://").
+       append(gangliaCollectorHostName).
+       append("/ganglia/graph.php?c=").
+       append(gangliaCluster).
+       append("&h=").
+       append(host == null ? "" : host).
+       append("&m=").
+       append(metric);
+
+    if (startTime != null) {
+      sb.append("&cs=").append(startTime);
+    }
+    if (endTime != null) {
+      sb.append("&ce=").append(endTime);
+    }
+    if (step != null) {
+      sb.append("&step=").append(step);
+    }
+    sb.append("&json=1");
+
+    return sb.toString();
+  }
 }
 

+ 788 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/MetricsMapping.java

@@ -0,0 +1,788 @@
+package org.apache.ambari.server.controller.ganglia;
+
+/**
+ *
+ */
+public class MetricsMapping {
+
+  private static String[] metrics = new String[] {
+      "cpu_nice",
+      "cpu_nice",
+      "cpu_wio",
+      "cpu_wio",
+      "cpu_user",
+      "cpu_user",
+      "cpu_idle",
+      "cpu_idle",
+      "cpu_system",
+      "cpu_system",
+      "cpu_aidle",
+      "cpu_aidle",
+      "dfs.datanode.heartBeats_avg_time",
+      "dfs.datanode.heartBeats_avg_time",
+      "dfs.datanode.bytes_written",
+      "dfs.datanode.bytes_written",
+      "dfs.datanode.writes_from_local_client",
+      "dfs.datanode.writes_from_local_client",
+      "dfs.datanode.blocks_verified",
+      "dfs.datanode.blocks_verified",
+      "dfs.datanode.heartBeats_num_ops",
+      "dfs.datanode.heartBeats_num_ops",
+      "dfs.datanode.writeBlockOp_num_ops",
+      "dfs.datanode.writeBlockOp_num_ops",
+      "dfs.datanode.writeBlockOp_avg_time",
+      "dfs.datanode.writeBlockOp_avg_time",
+      "dfs.datanode.blockReports_num_ops",
+      "dfs.datanode.blockReports_num_ops",
+      "dfs.datanode.writes_from_remote_client",
+      "dfs.datanode.writes_from_remote_client",
+      "dfs.datanode.blockReports_avg_time",
+      "dfs.datanode.blockReports_avg_time",
+      "dfs.datanode.blocks_written",
+      "dfs.datanode.blocks_written",
+      "disk_free",
+      "disk_free",
+      "disk_total",
+      "disk_total",
+      "part_max_used",
+      "part_max_used",
+      "hbase.regionserver.deleteRequestLatency_max",
+      "hbase.regionserver.deleteRequestLatency_max",
+      "hbase.regionserver.putRequestLatency_std_dev",
+      "hbase.regionserver.putRequestLatency_std_dev",
+      "hbase.regionserver.putRequestLatency_max",
+      "hbase.regionserver.putRequestLatency_max",
+      "hbase.regionserver.putRequestLatency_num_ops",
+      "hbase.regionserver.putRequestLatency_num_ops",
+      "hbase.regionserver.requests",
+      "hbase.regionserver.requests",
+      "hbase.regionserver.fsReadLatencyHistogram_median",
+      "hbase.regionserver.fsReadLatencyHistogram_median",
+      "hbase.regionserver.deleteRequestLatency_99th_percentile",
+      "hbase.regionserver.deleteRequestLatency_99th_percentile",
+      "hbase.regionserver.writeRequestsCount",
+      "hbase.regionserver.writeRequestsCount",
+      "hbase.regionserver.hdfsBlocksLocalityIndex",
+      "hbase.regionserver.hdfsBlocksLocalityIndex",
+      "hbase.regionserver.compactionQueueSize",
+      "hbase.regionserver.compactionQueueSize",
+      "hbase.regionserver.deleteRequestLatency_mean",
+      "hbase.regionserver.deleteRequestLatency_mean",
+      "hbase.regionserver.totalStaticBloomSizeKB",
+      "hbase.regionserver.totalStaticBloomSizeKB",
+      "hbase.regionserver.flushSize_num_ops",
+      "hbase.regionserver.flushSize_num_ops",
+      "hbase.regionserver.getRequestLatency_99th_percentile",
+      "hbase.regionserver.getRequestLatency_99th_percentile",
+      "hbase.regionserver.flushSize_avg_time",
+      "hbase.regionserver.flushSize_avg_time",
+      "hbase.regionserver.fsWriteLatencyHistogram_95th_percentile",
+      "hbase.regionserver.fsWriteLatencyHistogram_95th_percentile",
+      "hbase.regionserver.flushTime_num_ops",
+      "hbase.regionserver.flushTime_num_ops",
+      "hbase.regionserver.deleteRequestLatency_median",
+      "hbase.regionserver.deleteRequestLatency_median",
+      "hbase.regionserver.blockCacheSize",
+      "hbase.regionserver.blockCacheSize",
+      "hbase.regionserver.fsSyncLatency_num_ops",
+      "hbase.regionserver.fsSyncLatency_num_ops",
+      "hbase.regionserver.storefiles",
+      "hbase.regionserver.storefiles",
+      "hbase.regionserver.getRequestLatency_min",
+      "hbase.regionserver.getRequestLatency_min",
+      "hbase.regionserver.regions",
+      "hbase.regionserver.regions",
+      "hbase.regionserver.fsReadLatencyHistogram_max",
+      "hbase.regionserver.fsReadLatencyHistogram_max",
+      "hbase.regionserver.getRequestLatency_mean",
+      "hbase.regionserver.getRequestLatency_mean",
+      "hbase.regionserver.fsReadLatency_num_ops",
+      "hbase.regionserver.fsReadLatency_num_ops",
+      "hbase.regionserver.blockCacheMissCount",
+      "hbase.regionserver.blockCacheMissCount",
+      "hbase.regionserver.putRequestLatency_75th_percentile",
+      "hbase.regionserver.putRequestLatency_75th_percentile",
+      "hbase.regionserver.fsReadLatencyHistogram_99th_percentile",
+      "hbase.regionserver.fsReadLatencyHistogram_99th_percentile",
+      "hbase.regionserver.totalStaticIndexSizeKB",
+      "hbase.regionserver.totalStaticIndexSizeKB",
+      "hbase.regionserver.compactionTime_avg_time",
+      "hbase.regionserver.compactionTime_avg_time",
+      "hbase.regionserver.fsWriteLatencyHistogram_min",
+      "hbase.regionserver.fsWriteLatencyHistogram_min",
+      "hbase.regionserver.fsSyncLatency_avg_time",
+      "hbase.regionserver.fsSyncLatency_avg_time",
+      "hbase.regionserver.deleteRequestLatency_75th_percentile",
+      "hbase.regionserver.deleteRequestLatency_75th_percentile",
+      "hbase.regionserver.hlogFileCount",
+      "hbase.regionserver.hlogFileCount",
+      "hbase.regionserver.getRequestLatency_std_dev",
+      "hbase.regionserver.getRequestLatency_std_dev",
+      "hbase.regionserver.getRequestLatency_median",
+      "hbase.regionserver.getRequestLatency_median",
+      "hbase.regionserver.putRequestLatency_median",
+      "hbase.regionserver.putRequestLatency_median",
+      "hbase.regionserver.rootIndexSizeKB",
+      "hbase.regionserver.rootIndexSizeKB",
+      "hbase.regionserver.getRequestLatency_num_ops",
+      "hbase.regionserver.getRequestLatency_num_ops",
+      "hbase.regionserver.getRequestLatency_75th_percentile",
+      "hbase.regionserver.getRequestLatency_75th_percentile",
+      "hbase.regionserver.putRequestLatency_95th_percentile",
+      "hbase.regionserver.putRequestLatency_95th_percentile",
+      "hbase.regionserver.fsWriteLatencyHistogram_std_dev",
+      "hbase.regionserver.fsWriteLatencyHistogram_std_dev",
+      "hbase.regionserver.memstoreSizeMB",
+      "hbase.regionserver.memstoreSizeMB",
+      "hbase.regionserver.fsWriteLatencyHistogram_num_ops",
+      "hbase.regionserver.fsWriteLatencyHistogram_num_ops",
+      "hbase.regionserver.deleteRequestLatency_min",
+      "hbase.regionserver.deleteRequestLatency_min",
+      "hbase.regionserver.fsReadLatencyHistogram_75th_percentile",
+      "hbase.regionserver.fsReadLatencyHistogram_75th_percentile",
+      "hbase.regionserver.putRequestLatency_min",
+      "hbase.regionserver.putRequestLatency_min",
+      "hbase.regionserver.fsReadLatencyHistogram_std_dev",
+      "hbase.regionserver.fsReadLatencyHistogram_std_dev",
+      "hbase.regionserver.deleteRequestLatency_std_dev",
+      "hbase.regionserver.deleteRequestLatency_std_dev",
+      "hbase.regionserver.fsReadLatency_avg_time",
+      "hbase.regionserver.fsReadLatency_avg_time",
+      "hbase.regionserver.fsReadLatencyHistogram_num_ops",
+      "hbase.regionserver.fsReadLatencyHistogram_num_ops",
+      "hbase.regionserver.deleteRequestLatency_num_ops",
+      "hbase.regionserver.deleteRequestLatency_num_ops",
+      "hbase.regionserver.blockCacheEvictedCount",
+      "hbase.regionserver.blockCacheEvictedCount",
+      "hbase.regionserver.fsWriteLatencyHistogram_99th_percentile",
+      "hbase.regionserver.fsWriteLatencyHistogram_99th_percentile",
+      "hbase.regionserver.deleteRequestLatency_95th_percentile",
+      "hbase.regionserver.deleteRequestLatency_95th_percentile",
+      "hbase.regionserver.getRequestLatency_max",
+      "hbase.regionserver.getRequestLatency_max",
+      "hbase.regionserver.fsWriteLatencyHistogram_mean",
+      "hbase.regionserver.fsWriteLatencyHistogram_mean",
+      "hbase.regionserver.storefileIndexSizeMB",
+      "hbase.regionserver.storefileIndexSizeMB",
+      "hbase.regionserver.compactionSize_avg_time",
+      "hbase.regionserver.compactionSize_avg_time",
+      "hbase.regionserver.getRequestLatency_95th_percentile",
+      "hbase.regionserver.getRequestLatency_95th_percentile",
+      "hbase.regionserver.flushQueueSize",
+      "hbase.regionserver.flushQueueSize",
+      "hbase.regionserver.compactionSize_num_ops",
+      "hbase.regionserver.compactionSize_num_ops",
+      "hbase.regionserver.putRequestLatency_mean",
+      "hbase.regionserver.putRequestLatency_mean",
+      "hbase.regionserver.compactionTime_num_ops",
+      "hbase.regionserver.compactionTime_num_ops",
+      "hbase.regionserver.stores",
+      "hbase.regionserver.stores",
+      "hbase.regionserver.fsReadLatencyHistogram_min",
+      "hbase.regionserver.fsReadLatencyHistogram_min",
+      "hbase.regionserver.fsWriteLatency_avg_time",
+      "hbase.regionserver.fsWriteLatency_avg_time",
+      "hbase.regionserver.fsReadLatencyHistogram_mean",
+      "hbase.regionserver.fsReadLatencyHistogram_mean",
+      "hbase.regionserver.fsReadLatencyHistogram_95th_percentile",
+      "hbase.regionserver.fsReadLatencyHistogram_95th_percentile",
+      "hbase.regionserver.flushTime_avg_time",
+      "hbase.regionserver.flushTime_avg_time",
+      "hbase.regionserver.fsWriteLatencyHistogram_median",
+      "hbase.regionserver.fsWriteLatencyHistogram_median",
+      "hbase.regionserver.fsWriteLatencyHistogram_max",
+      "hbase.regionserver.fsWriteLatencyHistogram_max",
+      "hbase.regionserver.blockCacheHitCachingRatio",
+      "hbase.regionserver.blockCacheHitCachingRatio",
+      "hbase.regionserver.blockCacheHitCount",
+      "hbase.regionserver.blockCacheHitCount",
+      "hbase.regionserver.blockCacheCount",
+      "hbase.regionserver.blockCacheCount",
+      "hbase.regionserver.readRequestsCount",
+      "hbase.regionserver.readRequestsCount",
+      "hbase.regionserver.blockCacheFree",
+      "hbase.regionserver.blockCacheFree",
+      "hbase.regionserver.putRequestLatency_99th_percentile",
+      "hbase.regionserver.putRequestLatency_99th_percentile",
+      "hbase.regionserver.fsWriteLatencyHistogram_75th_percentile",
+      "hbase.regionserver.fsWriteLatencyHistogram_75th_percentile",
+      "hbase.regionserver.blockCacheHitRatio",
+      "hbase.regionserver.blockCacheHitRatio",
+      "hbase.regionserver.fsWriteLatency_num_ops",
+      "hbase.regionserver.fsWriteLatency_num_ops",
+      "jvm.metrics.gcCount",
+      "jvm.metrics.gcCount",
+      "jvm.metrics.threadsTimedWaiting",
+      "jvm.metrics.threadsTimedWaiting",
+      "jvm.metrics.logWarn",
+      "jvm.metrics.logWarn",
+      "jvm.metrics.threadsBlocked",
+      "jvm.metrics.threadsBlocked",
+      "jvm.metrics.logError",
+      "jvm.metrics.logError",
+      "jvm.metrics.logFatal",
+      "jvm.metrics.logFatal",
+      "jvm.metrics.threadsNew",
+      "jvm.metrics.threadsNew",
+      "jvm.metrics.memHeapCommittedM",
+      "jvm.metrics.memHeapCommittedM",
+      "jvm.metrics.threadsWaiting",
+      "jvm.metrics.threadsWaiting",
+      "jvm.metrics.memNonHeapCommittedM",
+      "jvm.metrics.memNonHeapCommittedM",
+      "jvm.metrics.maxMemoryM",
+      "jvm.metrics.maxMemoryM",
+      "jvm.metrics.threadsTerminated",
+      "jvm.metrics.threadsTerminated",
+      "jvm.metrics.threadsRunnable",
+      "jvm.metrics.threadsRunnable",
+      "jvm.metrics.memNonHeapUsedM",
+      "jvm.metrics.memNonHeapUsedM",
+      "jvm.metrics.gcTimeMillis",
+      "jvm.metrics.gcTimeMillis",
+      "jvm.metrics.memHeapUsedM",
+      "jvm.metrics.memHeapUsedM",
+      "jvm.metrics.logInfo",
+      "jvm.metrics.logInfo",
+      "load_one",
+      "load_one",
+      "load_five",
+      "load_five",
+      "load_fifteen",
+      "load_fifteen",
+      "mapred.shuffleOutput.shuffle_handler_busy_percent",
+      "mapred.shuffleOutput.shuffle_handler_busy_percent",
+      "mapred.tasktracker.reduces_running",
+      "mapred.tasktracker.reduces_running",
+      "mapred.tasktracker.reduceTaskSlots",
+      "mapred.tasktracker.reduceTaskSlots",
+      "mapred.tasktracker.maps_running",
+      "mapred.tasktracker.maps_running",
+      "mapred.tasktracker.mapTaskSlots",
+      "mapred.tasktracker.mapTaskSlots",
+      "swap_free",
+      "swap_free",
+      "mem_cached",
+      "mem_cached",
+      "mem_free",
+      "mem_free",
+      "mem_buffers",
+      "mem_buffers",
+      "mem_shared",
+      "mem_shared",
+      "metricssystem.MetricsSystem.publish_max_time",
+      "metricssystem.MetricsSystem.publish_max_time",
+      "metricssystem.MetricsSystem.publish_num_ops",
+      "metricssystem.MetricsSystem.publish_num_ops",
+      "metricssystem.MetricsSystem.snapshot_stdev_time",
+      "metricssystem.MetricsSystem.snapshot_stdev_time",
+      "metricssystem.MetricsSystem.snapshot_imax_time",
+      "metricssystem.MetricsSystem.snapshot_imax_time",
+      "metricssystem.MetricsSystem.num_sinks",
+      "metricssystem.MetricsSystem.num_sinks",
+      "metricssystem.MetricsSystem.snapshot_min_time",
+      "metricssystem.MetricsSystem.snapshot_min_time",
+      "metricssystem.MetricsSystem.snapshot_num_ops",
+      "metricssystem.MetricsSystem.snapshot_num_ops",
+      "metricssystem.MetricsSystem.snapshot_avg_time",
+      "metricssystem.MetricsSystem.snapshot_avg_time",
+      "metricssystem.MetricsSystem.dropped_pub_all",
+      "metricssystem.MetricsSystem.dropped_pub_all",
+      "metricssystem.MetricsSystem.sink.ganglia.latency_avg_time",
+      "metricssystem.MetricsSystem.sink.ganglia.latency_avg_time",
+      "metricssystem.MetricsSystem.publish_stdev_time",
+      "metricssystem.MetricsSystem.publish_stdev_time",
+      "metricssystem.MetricsSystem.publish_imin_time",
+      "metricssystem.MetricsSystem.publish_imin_time",
+      "metricssystem.MetricsSystem.num_sources",
+      "metricssystem.MetricsSystem.num_sources",
+      "metricssystem.MetricsSystem.snapshot_max_time",
+      "metricssystem.MetricsSystem.snapshot_max_time",
+      "metricssystem.MetricsSystem.sink.ganglia.latency_num_ops",
+      "metricssystem.MetricsSystem.sink.ganglia.latency_num_ops",
+      "metricssystem.MetricsSystem.publish_imax_time",
+      "metricssystem.MetricsSystem.publish_imax_time",
+      "metricssystem.MetricsSystem.publish_min_time",
+      "metricssystem.MetricsSystem.publish_min_time",
+      "metricssystem.MetricsSystem.publish_avg_time",
+      "metricssystem.MetricsSystem.publish_avg_time",
+      "metricssystem.MetricsSystem.snapshot_imin_time",
+      "metricssystem.MetricsSystem.snapshot_imin_time",
+      "bytes_out",
+      "bytes_out",
+      "pkts_in",
+      "pkts_in",
+      "pkts_out",
+      "pkts_out",
+      "bytes_in",
+      "bytes_in",
+      "proc_total",
+      "proc_total",
+      "proc_run",
+      "proc_run",
+      "rpc.metrics.getRegionInfo_num_ops",
+      "rpc.metrics.getRegionInfo_num_ops",
+      "rpc.metrics.isStopped_avg_time",
+      "rpc.metrics.isStopped_avg_time",
+      "rpc.metrics.RpcQueueTime_avg_time",
+      "rpc.metrics.RpcQueueTime_avg_time",
+      "rpc.metrics.isMasterRunning_avg_time",
+      "rpc.metrics.isMasterRunning_avg_time",
+      "rpc.metrics.delete_num_ops",
+      "rpc.metrics.delete_num_ops",
+      "rpc.metrics.getClusterStatus_avg_time",
+      "rpc.metrics.getClusterStatus_avg_time",
+      "rpc.metrics.getFromOnlineRegions_num_ops",
+      "rpc.metrics.getFromOnlineRegions_num_ops",
+      "rpc.metrics.flushRegion_avg_time",
+      "rpc.metrics.flushRegion_avg_time",
+      "rpc.metrics.getClosestRowBefore_num_ops",
+      "rpc.metrics.getClosestRowBefore_num_ops",
+      "rpc.metrics.ReceivedBytes",
+      "rpc.metrics.ReceivedBytes",
+      "rpc.metrics.addToOnlineRegions_avg_time",
+      "rpc.metrics.addToOnlineRegions_avg_time",
+      "rpc.metrics.splitRegion_avg_time",
+      "rpc.metrics.splitRegion_avg_time",
+      "rpc.metrics.regionServerReport_num_ops",
+      "rpc.metrics.regionServerReport_num_ops",
+      "rpc.metrics.getProtocolSignature_num_ops",
+      "rpc.metrics.getProtocolSignature_num_ops",
+      "rpc.metrics.offline_avg_time",
+      "rpc.metrics.offline_avg_time",
+      "rpc.metrics.checkAndDelete_num_ops",
+      "rpc.metrics.checkAndDelete_num_ops",
+      "rpc.metrics.abort_avg_time",
+      "rpc.metrics.abort_avg_time",
+      "rpc.metrics.openScanner_avg_time",
+      "rpc.metrics.openScanner_avg_time",
+      "rpc.metrics.removeFromOnlineRegions_num_ops",
+      "rpc.metrics.removeFromOnlineRegions_num_ops",
+      "rpc.metrics.stop_num_ops",
+      "rpc.metrics.stop_num_ops",
+      "rpc.metrics.shutdown_num_ops",
+      "rpc.metrics.shutdown_num_ops",
+      "rpc.metrics.getCatalogTracker_avg_time",
+      "rpc.metrics.getCatalogTracker_avg_time",
+      "rpc.metrics.regionServerStartup_avg_time",
+      "rpc.metrics.regionServerStartup_avg_time",
+      "rpc.metrics.disableTable_num_ops",
+      "rpc.metrics.disableTable_num_ops",
+      "rpc.metrics.getClosestRowBefore_avg_time",
+      "rpc.metrics.getClosestRowBefore_avg_time",
+      "rpc.metrics.move_num_ops",
+      "rpc.metrics.move_num_ops",
+      "rpc.metrics.disableTable_avg_time",
+      "rpc.metrics.disableTable_avg_time",
+      "rpc.metrics.assign_num_ops",
+      "rpc.metrics.assign_num_ops",
+      "rpc.metrics.balanceSwitch_num_ops",
+      "rpc.metrics.balanceSwitch_num_ops",
+      "rpc.metrics.getConfiguration_num_ops",
+      "rpc.metrics.getConfiguration_num_ops",
+      "rpc.metrics.createTable_num_ops",
+      "rpc.metrics.createTable_num_ops",
+      "rpc.metrics.getHTableDescriptors_num_ops",
+      "rpc.metrics.getHTableDescriptors_num_ops",
+      "rpc.metrics.exists_avg_time",
+      "rpc.metrics.exists_avg_time",
+      "rpc.metrics.lockRow_avg_time",
+      "rpc.metrics.lockRow_avg_time",
+      "rpc.metrics.openRegions_avg_time",
+      "rpc.metrics.openRegions_avg_time",
+      "rpc.metrics.bulkLoadHFiles_num_ops",
+      "rpc.metrics.bulkLoadHFiles_num_ops",
+      "rpc.metrics.RpcSlowResponse_avg_time",
+      "rpc.metrics.RpcSlowResponse_avg_time",
+      "rpc.metrics.incrementColumnValue_avg_time",
+      "rpc.metrics.incrementColumnValue_avg_time",
+      "rpc.metrics.execCoprocessor_avg_time",
+      "rpc.metrics.execCoprocessor_avg_time",
+      "rpc.metrics.openRegion_avg_time",
+      "rpc.metrics.openRegion_avg_time",
+      "rpc.metrics.getOnlineRegions_avg_time",
+      "rpc.metrics.getOnlineRegions_avg_time",
+      "rpc.metrics.closeRegion_num_ops",
+      "rpc.metrics.closeRegion_num_ops",
+      "rpc.metrics.enableTable_avg_time",
+      "rpc.metrics.enableTable_avg_time",
+      "rpc.metrics.replicateLogEntries_avg_time",
+      "rpc.metrics.replicateLogEntries_avg_time",
+      "rpc.metrics.NumOpenConnections",
+      "rpc.metrics.NumOpenConnections",
+      "rpc.metrics.getRegionInfo_avg_time",
+      "rpc.metrics.getRegionInfo_avg_time",
+      "rpc.metrics.exists_num_ops",
+      "rpc.metrics.exists_num_ops",
+      "rpc.metrics.compactRegion_num_ops",
+      "rpc.metrics.compactRegion_num_ops",
+      "rpc.metrics.checkAndDelete_avg_time",
+      "rpc.metrics.checkAndDelete_avg_time",
+      "rpc.metrics.unassign_num_ops",
+      "rpc.metrics.unassign_num_ops",
+      "rpc.metrics.createTable_avg_time",
+      "rpc.metrics.createTable_avg_time",
+      "rpc.metrics.getHTableDescriptors_avg_time",
+      "rpc.metrics.getHTableDescriptors_avg_time",
+      "rpc.metrics.rollHLogWriter_num_ops",
+      "rpc.metrics.rollHLogWriter_num_ops",
+      "rpc.metrics.closeRegion_avg_time",
+      "rpc.metrics.closeRegion_avg_time",
+      "rpc.metrics.RpcQueueTime_num_ops",
+      "rpc.metrics.RpcQueueTime_num_ops",
+      "rpc.metrics.isMasterRunning_num_ops",
+      "rpc.metrics.isMasterRunning_num_ops",
+      "rpc.metrics.getClusterStatus_num_ops",
+      "rpc.metrics.getClusterStatus_num_ops",
+      "rpc.metrics.isStopped_num_ops",
+      "rpc.metrics.isStopped_num_ops",
+      "rpc.metrics.checkAndPut_num_ops",
+      "rpc.metrics.checkAndPut_num_ops",
+      "rpc.metrics.isAborted_num_ops",
+      "rpc.metrics.isAborted_num_ops",
+      "rpc.metrics.RpcProcessingTime_num_ops",
+      "rpc.metrics.RpcProcessingTime_num_ops",
+      "rpc.metrics.rollHLogWriter_avg_time",
+      "rpc.metrics.rollHLogWriter_avg_time",
+      "rpc.metrics.openRegions_num_ops",
+      "rpc.metrics.openRegions_num_ops",
+      "rpc.metrics.lockRow_num_ops",
+      "rpc.metrics.lockRow_num_ops",
+      "rpc.metrics.unlockRow_avg_time",
+      "rpc.metrics.unlockRow_avg_time",
+      "rpc.metrics.close_num_ops",
+      "rpc.metrics.close_num_ops",
+      "rpc.metrics.getZooKeeper_avg_time",
+      "rpc.metrics.getZooKeeper_avg_time",
+      "rpc.metrics.reportRSFatalError_avg_time",
+      "rpc.metrics.reportRSFatalError_avg_time",
+      "rpc.metrics.unlockRow_num_ops",
+      "rpc.metrics.unlockRow_num_ops",
+      "rpc.metrics.flushRegion_num_ops",
+      "rpc.metrics.flushRegion_num_ops",
+      "rpc.metrics.stopMaster_avg_time",
+      "rpc.metrics.stopMaster_avg_time",
+      "rpc.metrics.getBlockCacheColumnFamilySummaries_num_ops",
+      "rpc.metrics.getBlockCacheColumnFamilySummaries_num_ops",
+      "rpc.metrics.getServerName_avg_time",
+      "rpc.metrics.getServerName_avg_time",
+      "rpc.metrics.getHServerInfo_avg_time",
+      "rpc.metrics.getHServerInfo_avg_time",
+      "rpc.metrics.RpcSlowResponse_num_ops",
+      "rpc.metrics.RpcSlowResponse_num_ops",
+      "rpc.metrics.increment_avg_time",
+      "rpc.metrics.increment_avg_time",
+      "rpc.metrics.getFromOnlineRegions_avg_time",
+      "rpc.metrics.getFromOnlineRegions_avg_time",
+      "rpc.metrics.get_num_ops",
+      "rpc.metrics.get_num_ops",
+      "rpc.metrics.reportRSFatalError_num_ops",
+      "rpc.metrics.reportRSFatalError_num_ops",
+      "rpc.metrics.getZooKeeper_num_ops",
+      "rpc.metrics.getZooKeeper_num_ops",
+      "rpc.metrics.shutdown_avg_time",
+      "rpc.metrics.shutdown_avg_time",
+      "rpc.metrics.stop_avg_time",
+      "rpc.metrics.stop_avg_time",
+      "rpc.metrics.splitRegion_num_ops",
+      "rpc.metrics.splitRegion_num_ops",
+      "rpc.metrics.addToOnlineRegions_num_ops",
+      "rpc.metrics.addToOnlineRegions_num_ops",
+      "rpc.metrics.bulkLoadHFiles_avg_time",
+      "rpc.metrics.bulkLoadHFiles_avg_time",
+      "rpc.metrics.deleteTable_num_ops",
+      "rpc.metrics.deleteTable_num_ops",
+      "rpc.metrics.getProtocolVersion_num_ops",
+      "rpc.metrics.getProtocolVersion_num_ops",
+      "rpc.metrics.next_num_ops",
+      "rpc.metrics.next_num_ops",
+      "rpc.metrics.RpcProcessingTime_avg_time",
+      "rpc.metrics.RpcProcessingTime_avg_time",
+      "rpc.metrics.execCoprocessor_num_ops",
+      "rpc.metrics.execCoprocessor_num_ops",
+      "rpc.metrics.checkAndPut_avg_time",
+      "rpc.metrics.checkAndPut_avg_time",
+      "rpc.metrics.isAborted_avg_time",
+      "rpc.metrics.isAborted_avg_time",
+      "rpc.metrics.incrementColumnValue_num_ops",
+      "rpc.metrics.incrementColumnValue_num_ops",
+      "rpc.metrics.deleteColumn_num_ops",
+      "rpc.metrics.deleteColumn_num_ops",
+      "rpc.metrics.getBlockCacheColumnFamilySummaries_avg_time",
+      "rpc.metrics.getBlockCacheColumnFamilySummaries_avg_time",
+      "rpc.metrics.checkOOME_num_ops",
+      "rpc.metrics.checkOOME_num_ops",
+      "rpc.metrics.getProtocolVersion_avg_time",
+      "rpc.metrics.getProtocolVersion_avg_time",
+      "rpc.metrics.deleteTable_avg_time",
+      "rpc.metrics.deleteTable_avg_time",
+      "rpc.metrics.next_avg_time",
+      "rpc.metrics.next_avg_time",
+      "rpc.metrics.rpcAuthorizationSuccesses",
+      "rpc.metrics.rpcAuthorizationSuccesses",
+      "rpc.metrics.addColumn_avg_time",
+      "rpc.metrics.addColumn_avg_time",
+      "rpc.metrics.getOnlineRegions_num_ops",
+      "rpc.metrics.getOnlineRegions_num_ops",
+      "rpc.metrics.modifyColumn_avg_time",
+      "rpc.metrics.modifyColumn_avg_time",
+      "rpc.metrics.getAlterStatus_avg_time",
+      "rpc.metrics.getAlterStatus_avg_time",
+      "rpc.metrics.openRegion_num_ops",
+      "rpc.metrics.openRegion_num_ops",
+      "rpc.metrics.multi_avg_time",
+      "rpc.metrics.multi_avg_time",
+      "rpc.metrics.put_avg_time",
+      "rpc.metrics.put_avg_time",
+      "rpc.metrics.SentBytes",
+      "rpc.metrics.SentBytes",
+      "rpc.metrics.stopMaster_num_ops",
+      "rpc.metrics.stopMaster_num_ops",
+      "rpc.metrics.callQueueLen",
+      "rpc.metrics.callQueueLen",
+      "rpc.metrics.getProtocolSignature_avg_time",
+      "rpc.metrics.getProtocolSignature_avg_time",
+      "rpc.metrics.regionServerReport_avg_time",
+      "rpc.metrics.regionServerReport_avg_time",
+      "rpc.metrics.getConfiguration_avg_time",
+      "rpc.metrics.getConfiguration_avg_time",
+      "rpc.metrics.offline_num_ops",
+      "rpc.metrics.offline_num_ops",
+      "rpc.metrics.move_avg_time",
+      "rpc.metrics.move_avg_time",
+      "rpc.metrics.assign_avg_time",
+      "rpc.metrics.assign_avg_time",
+      "rpc.metrics.balanceSwitch_avg_time",
+      "rpc.metrics.balanceSwitch_avg_time",
+      "rpc.metrics.addColumn_num_ops",
+      "rpc.metrics.addColumn_num_ops",
+      "rpc.metrics.rpcAuthorizationFailures",
+      "rpc.metrics.rpcAuthorizationFailures",
+      "rpc.metrics.unassign_avg_time",
+      "rpc.metrics.unassign_avg_time",
+      "rpc.metrics.enableTable_num_ops",
+      "rpc.metrics.enableTable_num_ops",
+      "rpc.metrics.compactRegion_avg_time",
+      "rpc.metrics.compactRegion_avg_time",
+      "rpc.metrics.balance_num_ops",
+      "rpc.metrics.balance_num_ops",
+      "rpc.metrics.modifyTable_num_ops",
+      "rpc.metrics.modifyTable_num_ops",
+      "rpc.metrics.close_avg_time",
+      "rpc.metrics.close_avg_time",
+      "rpc.metrics.getHServerInfo_num_ops",
+      "rpc.metrics.getHServerInfo_num_ops",
+      "rpc.metrics.get_avg_time",
+      "rpc.metrics.get_avg_time",
+      "rpc.metrics.getServerName_num_ops",
+      "rpc.metrics.getServerName_num_ops",
+      "rpc.metrics.openScanner_num_ops",
+      "rpc.metrics.openScanner_num_ops",
+      "rpc.metrics.deleteColumn_avg_time",
+      "rpc.metrics.deleteColumn_avg_time",
+      "rpc.metrics.checkOOME_avg_time",
+      "rpc.metrics.checkOOME_avg_time",
+      "rpc.metrics.abort_num_ops",
+      "rpc.metrics.abort_num_ops",
+      "rpc.metrics.modifyColumn_num_ops",
+      "rpc.metrics.modifyColumn_num_ops",
+      "rpc.metrics.getAlterStatus_num_ops",
+      "rpc.metrics.getAlterStatus_num_ops",
+      "rpc.metrics.replicateLogEntries_num_ops",
+      "rpc.metrics.replicateLogEntries_num_ops",
+      "rpc.metrics.modifyTable_avg_time",
+      "rpc.metrics.modifyTable_avg_time",
+      "rpc.metrics.balance_avg_time",
+      "rpc.metrics.balance_avg_time",
+      "rpc.metrics.rpcAuthenticationFailures",
+      "rpc.metrics.rpcAuthenticationFailures",
+      "rpc.metrics.rpcAuthenticationSuccesses",
+      "rpc.metrics.rpcAuthenticationSuccesses",
+      "rpc.metrics.delete_avg_time",
+      "rpc.metrics.delete_avg_time",
+      "rpc.metrics.increment_num_ops",
+      "rpc.metrics.increment_num_ops",
+      "rpc.metrics.getCatalogTracker_num_ops",
+      "rpc.metrics.getCatalogTracker_num_ops",
+      "rpc.metrics.regionServerStartup_num_ops",
+      "rpc.metrics.regionServerStartup_num_ops",
+      "rpc.metrics.removeFromOnlineRegions_avg_time",
+      "rpc.metrics.removeFromOnlineRegions_avg_time",
+      "rpc.metrics.multi_num_ops",
+      "rpc.metrics.multi_num_ops",
+      "rpc.metrics.put_num_ops",
+      "rpc.metrics.put_num_ops",
+      "rpc.metrics.abort.aboveOneSec._num_ops",
+      "rpc.metrics.abort.aboveOneSec._num_ops",
+      "rpc.metrics.abort.aboveOneSec._avg_time",
+      "rpc.metrics.abort.aboveOneSec._avg_time",
+      "rpc.metrics.addToOnlineRegions.aboveOneSec._avg_time",
+      "rpc.metrics.addToOnlineRegions.aboveOneSec._avg_time",
+      "rpc.metrics.addToOnlineRegions.aboveOneSec._num_ops",
+      "rpc.metrics.addToOnlineRegions.aboveOneSec._num_ops",
+      "rpc.metrics.bulkLoadHFiles.aboveOneSec._avg_time",
+      "rpc.metrics.bulkLoadHFiles.aboveOneSec._avg_time",
+      "rpc.metrics.bulkLoadHFiles.aboveOneSec._num_ops",
+      "rpc.metrics.bulkLoadHFiles.aboveOneSec._num_ops",
+      "rpc.metrics.checkAndDelete.aboveOneSec._avg_time",
+      "rpc.metrics.checkAndDelete.aboveOneSec._avg_time",
+      "rpc.metrics.checkAndDelete.aboveOneSec._num_ops",
+      "rpc.metrics.checkAndDelete.aboveOneSec._num_ops",
+      "rpc.metrics.checkAndPut.aboveOneSec._avg_time",
+      "rpc.metrics.checkAndPut.aboveOneSec._avg_time",
+      "rpc.metrics.checkAndPut.aboveOneSec._num_ops",
+      "rpc.metrics.checkAndPut.aboveOneSec._num_ops",
+      "rpc.metrics.checkOOME.aboveOneSec._num_ops",
+      "rpc.metrics.checkOOME.aboveOneSec._num_ops",
+      "rpc.metrics.checkOOME.aboveOneSec._avg_time",
+      "rpc.metrics.checkOOME.aboveOneSec._avg_time",
+      "rpc.metrics.close.aboveOneSec._avg_time",
+      "rpc.metrics.close.aboveOneSec._avg_time",
+      "rpc.metrics.close.aboveOneSec._num_ops",
+      "rpc.metrics.close.aboveOneSec._num_ops",
+      "rpc.metrics.closeRegion.aboveOneSec._avg_time",
+      "rpc.metrics.closeRegion.aboveOneSec._avg_time",
+      "rpc.metrics.closeRegion.aboveOneSec._num_ops",
+      "rpc.metrics.closeRegion.aboveOneSec._num_ops",
+      "rpc.metrics.compactRegion.aboveOneSec._num_ops",
+      "rpc.metrics.compactRegion.aboveOneSec._num_ops",
+      "rpc.metrics.compactRegion.aboveOneSec._avg_time",
+      "rpc.metrics.compactRegion.aboveOneSec._avg_time",
+      "rpc.metrics.delete.aboveOneSec._num_ops",
+      "rpc.metrics.delete.aboveOneSec._num_ops",
+      "rpc.metrics.delete.aboveOneSec._avg_time",
+      "rpc.metrics.delete.aboveOneSec._avg_time",
+      "rpc.metrics.execCoprocessor.aboveOneSec._avg_time",
+      "rpc.metrics.execCoprocessor.aboveOneSec._avg_time",
+      "rpc.metrics.execCoprocessor.aboveOneSec._num_ops",
+      "rpc.metrics.execCoprocessor.aboveOneSec._num_ops",
+      "rpc.metrics.exists.aboveOneSec._num_ops",
+      "rpc.metrics.exists.aboveOneSec._num_ops",
+      "rpc.metrics.exists.aboveOneSec._avg_time",
+      "rpc.metrics.exists.aboveOneSec._avg_time",
+      "rpc.metrics.flushRegion.aboveOneSec._avg_time",
+      "rpc.metrics.flushRegion.aboveOneSec._avg_time",
+      "rpc.metrics.flushRegion.aboveOneSec._num_ops",
+      "rpc.metrics.flushRegion.aboveOneSec._num_ops",
+      "rpc.metrics.get.aboveOneSec._avg_time",
+      "rpc.metrics.get.aboveOneSec._avg_time",
+      "rpc.metrics.get.aboveOneSec._num_ops",
+      "rpc.metrics.get.aboveOneSec._num_ops",
+      "rpc.metrics.getBlockCacheColumnFamilySummaries.aboveOneSec._num_ops",
+      "rpc.metrics.getBlockCacheColumnFamilySummaries.aboveOneSec._num_ops",
+      "rpc.metrics.getBlockCacheColumnFamilySummaries.aboveOneSec._avg_time",
+      "rpc.metrics.getBlockCacheColumnFamilySummaries.aboveOneSec._avg_time",
+      "rpc.metrics.getCatalogTracker.aboveOneSec._avg_time",
+      "rpc.metrics.getCatalogTracker.aboveOneSec._avg_time",
+      "rpc.metrics.getCatalogTracker.aboveOneSec._num_ops",
+      "rpc.metrics.getCatalogTracker.aboveOneSec._num_ops",
+      "rpc.metrics.getClosestRowBefore.aboveOneSec._avg_time",
+      "rpc.metrics.getClosestRowBefore.aboveOneSec._avg_time",
+      "rpc.metrics.getClosestRowBefore.aboveOneSec._num_ops",
+      "rpc.metrics.getClosestRowBefore.aboveOneSec._num_ops",
+      "rpc.metrics.getConfiguration.aboveOneSec._num_ops",
+      "rpc.metrics.getConfiguration.aboveOneSec._num_ops",
+      "rpc.metrics.getConfiguration.aboveOneSec._avg_time",
+      "rpc.metrics.getConfiguration.aboveOneSec._avg_time",
+      "rpc.metrics.getFromOnlineRegions.aboveOneSec._num_ops",
+      "rpc.metrics.getFromOnlineRegions.aboveOneSec._num_ops",
+      "rpc.metrics.getFromOnlineRegions.aboveOneSec._avg_time",
+      "rpc.metrics.getFromOnlineRegions.aboveOneSec._avg_time",
+      "rpc.metrics.getHServerInfo.aboveOneSec._num_ops",
+      "rpc.metrics.getHServerInfo.aboveOneSec._num_ops",
+      "rpc.metrics.getHServerInfo.aboveOneSec._avg_time",
+      "rpc.metrics.getHServerInfo.aboveOneSec._avg_time",
+      "rpc.metrics.getOnlineRegions.aboveOneSec._avg_time",
+      "rpc.metrics.getOnlineRegions.aboveOneSec._avg_time",
+      "rpc.metrics.getOnlineRegions.aboveOneSec._num_ops",
+      "rpc.metrics.getOnlineRegions.aboveOneSec._num_ops",
+      "rpc.metrics.getProtocolSignature.aboveOneSec._avg_time",
+      "rpc.metrics.getProtocolSignature.aboveOneSec._avg_time",
+      "rpc.metrics.getProtocolSignature.aboveOneSec._num_ops",
+      "rpc.metrics.getProtocolSignature.aboveOneSec._num_ops",
+      "rpc.metrics.getProtocolVersion.aboveOneSec._num_ops",
+      "rpc.metrics.getProtocolVersion.aboveOneSec._num_ops",
+      "rpc.metrics.getProtocolVersion.aboveOneSec._avg_time",
+      "rpc.metrics.getProtocolVersion.aboveOneSec._avg_time",
+      "rpc.metrics.getRegionInfo.aboveOneSec._num_ops",
+      "rpc.metrics.getRegionInfo.aboveOneSec._num_ops",
+      "rpc.metrics.getRegionInfo.aboveOneSec._avg_time",
+      "rpc.metrics.getRegionInfo.aboveOneSec._avg_time",
+      "rpc.metrics.getServerName.aboveOneSec._num_ops",
+      "rpc.metrics.getServerName.aboveOneSec._num_ops",
+      "rpc.metrics.getServerName.aboveOneSec._avg_time",
+      "rpc.metrics.getServerName.aboveOneSec._avg_time",
+      "rpc.metrics.getZooKeeper.aboveOneSec._num_ops",
+      "rpc.metrics.getZooKeeper.aboveOneSec._num_ops",
+      "rpc.metrics.getZooKeeper.aboveOneSec._avg_time",
+      "rpc.metrics.getZooKeeper.aboveOneSec._avg_time",
+      "rpc.metrics.increment.aboveOneSec._num_ops",
+      "rpc.metrics.increment.aboveOneSec._num_ops",
+      "rpc.metrics.increment.aboveOneSec._avg_time",
+      "rpc.metrics.increment.aboveOneSec._avg_time",
+      "rpc.metrics.incrementColumnValue.aboveOneSec._avg_time",
+      "rpc.metrics.incrementColumnValue.aboveOneSec._avg_time",
+      "rpc.metrics.incrementColumnValue.aboveOneSec._num_ops",
+      "rpc.metrics.incrementColumnValue.aboveOneSec._num_ops",
+      "rpc.metrics.isAborted.aboveOneSec._avg_time",
+      "rpc.metrics.isAborted.aboveOneSec._avg_time",
+      "rpc.metrics.isAborted.aboveOneSec._num_ops",
+      "rpc.metrics.isAborted.aboveOneSec._num_ops",
+      "rpc.metrics.isStopped.aboveOneSec._avg_time",
+      "rpc.metrics.isStopped.aboveOneSec._avg_time",
+      "rpc.metrics.isStopped.aboveOneSec._num_ops",
+      "rpc.metrics.isStopped.aboveOneSec._num_ops",
+      "rpc.metrics.lockRow.aboveOneSec._num_ops",
+      "rpc.metrics.lockRow.aboveOneSec._num_ops",
+      "rpc.metrics.lockRow.aboveOneSec._avg_time",
+      "rpc.metrics.lockRow.aboveOneSec._avg_time",
+      "rpc.metrics.multi.aboveOneSec._num_ops",
+      "rpc.metrics.multi.aboveOneSec._num_ops",
+      "rpc.metrics.multi.aboveOneSec._avg_time",
+      "rpc.metrics.multi.aboveOneSec._avg_time",
+      "rpc.metrics.next.aboveOneSec._num_ops",
+      "rpc.metrics.next.aboveOneSec._num_ops",
+      "rpc.metrics.next.aboveOneSec._avg_time",
+      "rpc.metrics.next.aboveOneSec._avg_time",
+      "rpc.metrics.openRegion.aboveOneSec._avg_time",
+      "rpc.metrics.openRegion.aboveOneSec._avg_time",
+      "rpc.metrics.openRegion.aboveOneSec._num_ops",
+      "rpc.metrics.openRegion.aboveOneSec._num_ops",
+      "rpc.metrics.openRegions.aboveOneSec._num_ops",
+      "rpc.metrics.openRegions.aboveOneSec._num_ops",
+      "rpc.metrics.openRegions.aboveOneSec._avg_time",
+      "rpc.metrics.openRegions.aboveOneSec._avg_time",
+      "rpc.metrics.openScanner.aboveOneSec._num_ops",
+      "rpc.metrics.openScanner.aboveOneSec._num_ops",
+      "rpc.metrics.openScanner.aboveOneSec._avg_time",
+      "rpc.metrics.openScanner.aboveOneSec._avg_time",
+      "rpc.metrics.put.aboveOneSec._num_ops",
+      "rpc.metrics.put.aboveOneSec._num_ops",
+      "rpc.metrics.put.aboveOneSec._avg_time",
+      "rpc.metrics.put.aboveOneSec._avg_time",
+      "rpc.metrics.removeFromOnlineRegions.aboveOneSec._avg_time",
+      "rpc.metrics.removeFromOnlineRegions.aboveOneSec._avg_time",
+      "rpc.metrics.removeFromOnlineRegions.aboveOneSec._num_ops",
+      "rpc.metrics.removeFromOnlineRegions.aboveOneSec._num_ops",
+      "rpc.metrics.replicateLogEntries.aboveOneSec._avg_time",
+      "rpc.metrics.replicateLogEntries.aboveOneSec._avg_time",
+      "rpc.metrics.replicateLogEntries.aboveOneSec._num_ops",
+      "rpc.metrics.replicateLogEntries.aboveOneSec._num_ops",
+      "rpc.metrics.rollHLogWriter.aboveOneSec._avg_time",
+      "rpc.metrics.rollHLogWriter.aboveOneSec._avg_time",
+      "rpc.metrics.rollHLogWriter.aboveOneSec._num_ops",
+      "rpc.metrics.rollHLogWriter.aboveOneSec._num_ops",
+      "rpc.metrics.splitRegion.aboveOneSec._avg_time",
+      "rpc.metrics.splitRegion.aboveOneSec._avg_time",
+      "rpc.metrics.splitRegion.aboveOneSec._num_ops",
+      "rpc.metrics.splitRegion.aboveOneSec._num_ops",
+      "rpc.metrics.stop.aboveOneSec._num_ops",
+      "rpc.metrics.stop.aboveOneSec._num_ops",
+      "rpc.metrics.stop.aboveOneSec._avg_time",
+      "rpc.metrics.stop.aboveOneSec._avg_time",
+      "rpc.metrics.unlockRow.aboveOneSec._num_ops",
+      "rpc.metrics.unlockRow.aboveOneSec._num_ops",
+      "rpc.metrics.unlockRow.aboveOneSec._avg_time",
+      "rpc.metrics.unlockRow.aboveOneSec._avg_time"
+  };
+}

+ 33 - 7
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java

@@ -34,6 +34,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Set;
@@ -53,12 +54,26 @@ public class ClusterControllerImpl implements ClusterController {
    */
   private final Map<Resource.Type, ResourceProvider> resourceProviders;
 
+  /**
+   * Map of property provider lists keyed by resource type.
+   */
+  private final Map<Resource.Type, List<PropertyProvider>> propertyProviders;
+
+  /**
+   * Map of schemas keyed by resource type.
+   */
+  private final Map<Resource.Type, Schema> schemas;
+
 
   // ----- Constructors ------------------------------------------------------
 
   public ClusterControllerImpl(ProviderModule providerModule) {
     this.providerModule = providerModule;
-    this.resourceProviders = getResourceSchemas();
+    resourceProviders   = new HashMap<Resource.Type, ResourceProvider>();
+    propertyProviders   = new HashMap<Resource.Type, List<PropertyProvider>>();
+    schemas             = new HashMap<Resource.Type, Schema>();
+
+    setProviders();
   }
 
 
@@ -81,7 +96,16 @@ public class ClusterControllerImpl implements ClusterController {
 
   @Override
   public Schema getSchema(Resource.Type type) {
-    return resourceProviders.get(type).getSchema();
+    Schema schema;
+
+    synchronized (schemas) {
+      schema = schemas.get(type);
+      if (schema == null) {
+        schema = new SchemaImpl(resourceProviders.get(type), propertyProviders.get(type));
+        schemas.put(Resource.Type.Cluster, schema);
+      }
+    }
+    return schema;
   }
 
   @Override
@@ -117,7 +141,7 @@ public class ClusterControllerImpl implements ClusterController {
                                           Predicate predicate) throws AmbariException{
     Set<Resource> keepers = resources;
 
-    for (PropertyProvider propertyProvider : resourceProviders.get(type).getPropertyProviders()) {
+    for (PropertyProvider propertyProvider : propertyProviders.get(type)) {
       if (providesRequestProperties(propertyProvider, request, predicate)) {
         keepers = propertyProvider.populateResources(keepers, request, predicate);
       }
@@ -146,16 +170,18 @@ public class ClusterControllerImpl implements ClusterController {
     return !requestPropertyIds.isEmpty();
   }
 
-  private Map<Resource.Type, ResourceProvider> getResourceSchemas() {
-    Map<Resource.Type, ResourceProvider> resourceProviders = new HashMap<Resource.Type, ResourceProvider>();
-
+  private void setProviders() {
     resourceProviders.put(Resource.Type.Cluster, providerModule.getResourceProvider(Resource.Type.Cluster));
     resourceProviders.put(Resource.Type.Service, providerModule.getResourceProvider(Resource.Type.Service));
     resourceProviders.put(Resource.Type.Host, providerModule.getResourceProvider(Resource.Type.Host));
     resourceProviders.put(Resource.Type.Component, providerModule.getResourceProvider(Resource.Type.Component));
     resourceProviders.put(Resource.Type.HostComponent, providerModule.getResourceProvider(Resource.Type.HostComponent));
 
-    return resourceProviders;
+    propertyProviders.put(Resource.Type.Cluster, providerModule.getPropertyProviders(Resource.Type.Cluster));
+    propertyProviders.put(Resource.Type.Service, providerModule.getPropertyProviders(Resource.Type.Service));
+    propertyProviders.put(Resource.Type.Host, providerModule.getPropertyProviders(Resource.Type.Host));
+    propertyProviders.put(Resource.Type.Component, providerModule.getPropertyProviders(Resource.Type.Component));
+    propertyProviders.put(Resource.Type.HostComponent, providerModule.getPropertyProviders(Resource.Type.HostComponent));
   }
 
 

+ 148 - 13
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java

@@ -18,28 +18,163 @@
 
 package org.apache.ambari.server.controller.internal;
 
-import java.util.LinkedList;
-import java.util.List;
-
-import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.AmbariServer;
-import org.apache.ambari.server.controller.spi.PropertyProvider;
+import org.apache.ambari.server.controller.ganglia.GangliaPropertyProvider;
+import org.apache.ambari.server.controller.jmx.JMXPropertyProvider;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
 import org.apache.ambari.server.controller.spi.ProviderModule;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.utilities.PredicateBuilder;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.spi.PropertyProvider;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
-import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.type.TypeReference;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
 /**
- *
+ * The default provider module implementation.
  */
 public class DefaultProviderModule implements ProviderModule {
 
-  private static final List<PropertyProvider> PROPERTY_PROVIDERS =
-      new LinkedList<PropertyProvider>();
- 
+  private static final PropertyId HOST_ATTRIBUTES_PROPERTY_ID               = PropertyHelper.getPropertyId("attributes", "Hosts");
+  private static final PropertyId HOST_COMPONENT_HOST_NAME_PROPERTY_ID      = PropertyHelper.getPropertyId("host_name", "HostRoles");
+  private static final PropertyId HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("component_name", "HostRoles");
+
+  /**
+   * The map of resource providers.
+   */
+  private final Map<Resource.Type, ResourceProvider> resourceProviders = new HashMap<Resource.Type, ResourceProvider>();
+
+  /**
+   * The map of lists of property providers.
+   */
+  private final Map<Resource.Type,List<PropertyProvider>> propertyProviders = new HashMap<Resource.Type, List<PropertyProvider>>();
+
+  /**
+   * The map of hosts.
+   */
+  private Map<String, String> hostMapping;
+
+  /**
+   * The host name of the Ganglia collector.
+   */
+  private String gangliaCollectorHostName;
+
+
+  // ----- Constructors ------------------------------------------------------
+
+  /**
+   * Create a default provider module.
+   */
+  public DefaultProviderModule() {
+    AmbariManagementController managementController = AmbariServer.getController();
+
+    // First create all of the resource providers...
+    for (Resource.Type type : Resource.Type.values()){
+      createResourceProvider(type, managementController);
+    }
+
+    // ... then create the things needed to create the property providers ...
+    try {
+      hostMapping              = getHostMap();
+      gangliaCollectorHostName = getGangliaCollectorHost();
+    } catch (AmbariException e) {
+      // TODO ...
+    }
+
+    // ... then create all of the property providers
+    for (Resource.Type type : Resource.Type.values()){
+      createPropertyProviders(type);
+    }
+  }
+
+
+  // ----- ProviderModule ----------------------------------------------------
+
   @Override
   public ResourceProvider getResourceProvider(Resource.Type type) {
-    return ResourceProviderImpl.getResourceProvider(type,
-        PROPERTY_PROVIDERS, PropertyHelper.getPropertyIds(type, "DB"),
-        PropertyHelper.getKeyPropertyIds(type), AmbariServer.getController());
+    return resourceProviders.get(type);
+  }
+
+  @Override
+  public List<PropertyProvider> getPropertyProviders(Resource.Type type) {
+    return propertyProviders.get(type);
+  }
+
+
+  // ----- utility methods ---------------------------------------------------
+
+  private void createResourceProvider(Resource.Type type, AmbariManagementController managementController) {
+    resourceProviders.put( type , ResourceProviderImpl.getResourceProvider(
+        type,
+        PropertyHelper.getPropertyIds(type, "DB"),
+        PropertyHelper.getKeyPropertyIds(type),
+        managementController));
   }
+
+  private void createPropertyProviders(Resource.Type type) {
+    List<PropertyProvider> providers = new LinkedList<PropertyProvider>();
+    if (type == Resource.Type.HostComponent) {
+      providers.add(new JMXPropertyProvider(
+          PropertyHelper.getPropertyIds(type, "JMX"),
+          new URLStreamProvider(),
+          hostMapping));
+
+      providers.add(new GangliaPropertyProvider(
+          PropertyHelper.getPropertyIds(type, "GANGLIA"),
+          new URLStreamProvider(),
+          gangliaCollectorHostName));
+    }
+    propertyProviders.put(type, providers);
+  }
+
+  public Map<String, String> getHostMap() throws AmbariException {
+    Map<String, String> hostMap      = new HashMap<String, String>();
+    ResourceProvider    hostProvider = getResourceProvider(Resource.Type.Host);
+    ObjectMapper        mapper       = new ObjectMapper();
+    Request             request      = PropertyHelper.getReadRequest(Collections.singleton(HOST_ATTRIBUTES_PROPERTY_ID));
+
+    Set<Resource> hosts = hostProvider.getResources(request, null);
+    for (Resource host : hosts) {
+      String attributes = host.getPropertyValue(HOST_ATTRIBUTES_PROPERTY_ID);
+      if (attributes != null && !attributes.startsWith("[]")) {
+        try {
+          Map<String, String> attributeMap = mapper.readValue(attributes, new TypeReference<Map<String, String>>() {});
+          hostMap.put(attributeMap.get("privateFQDN"), attributeMap.get("publicFQDN"));
+        } catch (IOException e) {
+          throw new IllegalStateException("Can't read hosts " + attributes, e);
+        }
+      }
+    }
+    return hostMap;
+  }
+
+  public String getGangliaCollectorHost() throws AmbariException {
+    ResourceProvider provider = getResourceProvider(Resource.Type.HostComponent);
+    Request          request  = PropertyHelper.getReadRequest(Collections.singleton(HOST_COMPONENT_HOST_NAME_PROPERTY_ID));
+
+    Predicate predicate = new PredicateBuilder().property(HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID).
+        equals("GANGLIA_MONITOR_SERVER").toPredicate();
+
+    Set<Resource> hostComponents = provider.getResources(request, predicate);
+    for (Resource hostComponent : hostComponents) {
+      String hostName = hostComponent.getPropertyValue(HOST_COMPONENT_HOST_NAME_PROPERTY_ID);
+      return hostMapping.get(hostName);
+    }
+
+    return null;
+  }
+
 }

+ 19 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestImpl.java

@@ -77,6 +77,24 @@ public class RequestImpl implements Request {
 
   @Override
   public TemporalInfo getTemporalInfo(PropertyId id) {
-    return null;  //TODO
+    return new TemporalInfoImpl();
+  }
+
+
+  public static class TemporalInfoImpl implements TemporalInfo {
+    @Override
+    public Long getStartTime() {
+      return null;  //TODO
+    }
+
+    @Override
+    public Long getEndTime() {
+      return null;  //TODO
+    }
+
+    @Override
+    public Long getStep() {
+      return null;  //TODO
+    }
   }
 }

+ 50 - 31
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ResourceProviderImpl.java

@@ -44,6 +44,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -69,9 +70,9 @@ public abstract class ResourceProviderImpl implements ResourceProvider {
   private final AmbariManagementController managementController;
 
   /**
-   * The schema for this provider's resource type.
+   * Key property mapping by resource type.
    */
-  private final Schema schema;
+  private final Map<Resource.Type, PropertyId> keyPropertyIds;
 
 
   // ----- Property ID constants ---------------------------------------------
@@ -112,16 +113,16 @@ public abstract class ResourceProviderImpl implements ResourceProvider {
    * Create a  new resource provider for the given management controller.
    *
    * @param propertyIds           the property ids
+   * @param keyPropertyIds        the key property ids
    * @param managementController  the management controller
    */
-  private ResourceProviderImpl(List<PropertyProvider> propertyProviders,
-                               Set<PropertyId> propertyIds,
+  private ResourceProviderImpl(Set<PropertyId> propertyIds,
                                Map<Resource.Type, PropertyId> keyPropertyIds,
                                AmbariManagementController managementController) {
-    this.propertyProviders    = propertyProviders;
+    this.propertyProviders    = new LinkedList<PropertyProvider>();
     this.propertyIds          = propertyIds;
+    this.keyPropertyIds       = keyPropertyIds;
     this.managementController = managementController;
-    this.schema               = new SchemaImpl(this, keyPropertyIds);
   }
 
 
@@ -133,16 +134,10 @@ public abstract class ResourceProviderImpl implements ResourceProvider {
   }
 
   @Override
-  public List<PropertyProvider> getPropertyProviders() {
-    return propertyProviders;
+  public Map<Resource.Type, PropertyId> getKeyPropertyIds() {
+    return keyPropertyIds;
   }
 
-  @Override
-  public Schema getSchema() {
-    return schema;
-  }
-
-
   // ----- accessors ---------------------------------------------------------
 
   /**
@@ -154,12 +149,21 @@ public abstract class ResourceProviderImpl implements ResourceProvider {
     return managementController;
   }
 
+  /**
+   * Add a property provider
+   * @param propertyProvider
+   */
+  public void addPropertyProvider(PropertyProvider propertyProvider) {
+    propertyProviders.add(propertyProvider);
+  }
 
   // ----- utility methods ---------------------------------------------------
 
   protected abstract Set<PropertyId> getPKPropertyIds();
 
-  protected Set<Map<PropertyId, String>> getPropertyMaps(Map<PropertyId, String> requestPropertyMap, Predicate predicate) throws AmbariException{
+  protected Set<Map<PropertyId, String>> getPropertyMaps(Map<PropertyId, String> requestPropertyMap,
+                                                         Predicate predicate)
+      throws AmbariException{
 
     Set<PropertyId>              pkPropertyIds       = getPKPropertyIds();
     Set<Map<PropertyId, String>> properties          = new HashSet<Map<PropertyId, String>>();
@@ -264,6 +268,7 @@ public abstract class ResourceProviderImpl implements ResourceProvider {
   /**
    * Factory method for obtaining a resource provider based on a given type and management controller.
    *
+   *
    * @param type                  the resource type
    * @param propertyIds           the property ids
    * @param managementController  the management controller
@@ -271,21 +276,20 @@ public abstract class ResourceProviderImpl implements ResourceProvider {
    * @return a new resource provider
    */
   public static ResourceProvider getResourceProvider(Resource.Type type,
-                                                     List<PropertyProvider> propertyProviders,
                                                      Set<PropertyId> propertyIds,
                                                      Map<Resource.Type, PropertyId> keyPropertyIds,
                                                      AmbariManagementController managementController) {
     switch (type) {
       case Cluster:
-        return new ClusterResourceProvider(propertyProviders, propertyIds, keyPropertyIds, managementController);
+        return new ClusterResourceProvider(propertyIds, keyPropertyIds, managementController);
       case Service:
-        return new ServiceResourceProvider(propertyProviders, propertyIds, keyPropertyIds, managementController);
+        return new ServiceResourceProvider(propertyIds, keyPropertyIds, managementController);
       case Component:
-        return new ComponentResourceProvider(propertyProviders, propertyIds, keyPropertyIds, managementController);
+        return new ComponentResourceProvider(propertyIds, keyPropertyIds, managementController);
       case Host:
-        return new HostResourceProvider(propertyProviders, propertyIds, keyPropertyIds, managementController);
+        return new HostResourceProvider(propertyIds, keyPropertyIds, managementController);
       case HostComponent:
-        return new HostComponentResourceProvider(propertyProviders, propertyIds, keyPropertyIds, managementController);
+        return new HostComponentResourceProvider(propertyIds, keyPropertyIds, managementController);
     }
     throw new IllegalArgumentException("Unknown type " + type);
   }
@@ -305,10 +309,13 @@ public abstract class ResourceProviderImpl implements ResourceProvider {
      * Create a  new resource provider for the given management controller.
      *
      * @param propertyIds           the property ids
+     * @param keyPropertyIds        the key property ids
      * @param managementController  the management controller
      */
-    private ClusterResourceProvider(List<PropertyProvider> propertyProviders, Set<PropertyId> propertyIds, Map<Resource.Type, PropertyId> keyPropertyIds, AmbariManagementController managementController) {
-      super(propertyProviders, propertyIds, keyPropertyIds, managementController);
+    private ClusterResourceProvider(Set<PropertyId> propertyIds,
+                                    Map<Resource.Type, PropertyId> keyPropertyIds,
+                                    AmbariManagementController managementController) {
+      super(propertyIds, keyPropertyIds, managementController);
     }
 
 // ----- ResourceProvider ------------------------------------------------
@@ -394,10 +401,13 @@ public abstract class ResourceProviderImpl implements ResourceProvider {
      * Create a  new resource provider for the given management controller.
      *
      * @param propertyIds           the property ids
+     * @param keyPropertyIds        the key property ids
      * @param managementController  the management controller
      */
-    private ServiceResourceProvider(List<PropertyProvider> propertyProviders, Set<PropertyId> propertyIds, Map<Resource.Type, PropertyId> keyPropertyIds, AmbariManagementController managementController) {
-      super(propertyProviders, propertyIds, keyPropertyIds, managementController);
+    private ServiceResourceProvider(Set<PropertyId> propertyIds,
+                                    Map<Resource.Type, PropertyId> keyPropertyIds,
+                                    AmbariManagementController managementController) {
+      super(propertyIds, keyPropertyIds, managementController);
     }
 
     // ----- ResourceProvider ------------------------------------------------
@@ -483,10 +493,13 @@ public abstract class ResourceProviderImpl implements ResourceProvider {
      * Create a  new resource provider for the given management controller.
      *
      * @param propertyIds           the property ids
+     * @param keyPropertyIds        the key property ids
      * @param managementController  the management controller
      */
-    private ComponentResourceProvider(List<PropertyProvider> propertyProviders, Set<PropertyId> propertyIds, Map<Resource.Type, PropertyId> keyPropertyIds, AmbariManagementController managementController) {
-      super(propertyProviders, propertyIds, keyPropertyIds, managementController);
+    private ComponentResourceProvider(Set<PropertyId> propertyIds,
+                                      Map<Resource.Type, PropertyId> keyPropertyIds,
+                                      AmbariManagementController managementController) {
+      super(propertyIds, keyPropertyIds, managementController);
     }
 
     // ----- ResourceProvider ------------------------------------------------
@@ -572,10 +585,13 @@ public abstract class ResourceProviderImpl implements ResourceProvider {
      * Create a  new resource provider for the given management controller.
      *
      * @param propertyIds           the property ids
+     * @param keyPropertyIds        the key property ids
      * @param managementController  the management controller
      */
-    private HostResourceProvider(List<PropertyProvider> propertyProviders, Set<PropertyId> propertyIds, Map<Resource.Type, PropertyId> keyPropertyIds, AmbariManagementController managementController) {
-      super(propertyProviders, propertyIds, keyPropertyIds, managementController);
+    private HostResourceProvider(Set<PropertyId> propertyIds,
+                                 Map<Resource.Type, PropertyId> keyPropertyIds,
+                                 AmbariManagementController managementController) {
+      super(propertyIds, keyPropertyIds, managementController);
     }
 
     // ----- ResourceProvider ------------------------------------------------
@@ -667,10 +683,13 @@ public abstract class ResourceProviderImpl implements ResourceProvider {
      * Create a  new resource provider for the given management controller.
      *
      * @param propertyIds           the property ids
+     * @param keyPropertyIds        the key property ids
      * @param managementController  the management controller
      */
-    private HostComponentResourceProvider(List<PropertyProvider> propertyProviders, Set<PropertyId> propertyIds, Map<Resource.Type, PropertyId> keyPropertyIds, AmbariManagementController managementController) {
-      super(propertyProviders, propertyIds, keyPropertyIds, managementController);
+    private HostComponentResourceProvider(Set<PropertyId> propertyIds,
+                                          Map<Resource.Type, PropertyId> keyPropertyIds,
+                                          AmbariManagementController managementController) {
+      super(propertyIds, keyPropertyIds, managementController);
     }
 
     // ----- ResourceProvider ------------------------------------------------

+ 4 - 10
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/SchemaImpl.java

@@ -44,11 +44,6 @@ public class SchemaImpl implements Schema {
    */
   private final List<PropertyProvider> propertyProviders;
 
-  /**
-   * Key property mapping by resource type.
-   */
-  private final Map<Resource.Type, PropertyId> keyPropertyIds;
-
 
   // ----- Constructors ------------------------------------------------------
 
@@ -56,13 +51,12 @@ public class SchemaImpl implements Schema {
    * Create a new schema for the given providers.
    *
    * @param resourceProvider   the resource provider
-   * @param keyPropertyIds     the key property mapping
+   * @param propertyProviders  the property providers
    */
   public SchemaImpl(ResourceProvider resourceProvider,
-                    Map<Resource.Type, PropertyId> keyPropertyIds) {
+                    List<PropertyProvider> propertyProviders) {
     this.resourceProvider = resourceProvider;
-    this.propertyProviders = resourceProvider.getPropertyProviders();
-    this.keyPropertyIds = keyPropertyIds;
+    this.propertyProviders = propertyProviders;
   }
 
 
@@ -70,7 +64,7 @@ public class SchemaImpl implements Schema {
 
   @Override
   public PropertyId getKeyPropertyId(Resource.Type type) {
-    return keyPropertyIds.get(type);
+    return resourceProvider.getKeyPropertyIds().get(type);
   }
 
   @Override

+ 140 - 8
ambari-server/src/main/java/org/apache/ambari/server/controller/jdbc/JDBCProviderModule.java

@@ -18,27 +18,159 @@
 
 package org.apache.ambari.server.controller.jdbc;
 
-import org.apache.ambari.server.controller.spi.ProviderModule;
-import org.apache.ambari.server.controller.utilities.DBHelper;
-import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.ganglia.GangliaPropertyProvider;
+import org.apache.ambari.server.controller.internal.URLStreamProvider;
+import org.apache.ambari.server.controller.jmx.JMXPropertyProvider;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.PropertyId;
 import org.apache.ambari.server.controller.spi.PropertyProvider;
+import org.apache.ambari.server.controller.spi.ProviderModule;
+import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.utilities.DBHelper;
+import org.apache.ambari.server.controller.utilities.PredicateBuilder;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.type.TypeReference;
 
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 /**
- * Module to plug in the JDBC resource provider.
+ * The default provider module implementation.
  */
 public class JDBCProviderModule implements ProviderModule {
 
-  private static final List<PropertyProvider> PROPERTY_PROVIDERS = new LinkedList<PropertyProvider>();
+  private static final PropertyId HOST_ATTRIBUTES_PROPERTY_ID               = PropertyHelper.getPropertyId("attributes", "Hosts");
+  private static final PropertyId HOST_COMPONENT_HOST_NAME_PROPERTY_ID      = PropertyHelper.getPropertyId("host_name", "HostRoles");
+  private static final PropertyId HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("component_name", "HostRoles");
+
+  /**
+   * The map of resource providers.
+   */
+  private final Map<Resource.Type, ResourceProvider> resourceProviders = new HashMap<Resource.Type, ResourceProvider>();
+
+  /**
+   * The map of lists of property providers.
+   */
+  private final Map<Resource.Type,List<PropertyProvider>> propertyProviders = new HashMap<Resource.Type, List<PropertyProvider>>();
+
+  /**
+   * The map of hosts.
+   */
+  private Map<String, String> hostMapping;
+
+  /**
+   * The host name of the Ganglia collector.
+   */
+  private String gangliaCollectorHostName;
+
+
+  // ----- Constructors ------------------------------------------------------
+
+  /**
+   * Create a default provider module.
+   */
+  public JDBCProviderModule() {
+    // First create all of the resource providers...
+    for (Resource.Type type : Resource.Type.values()){
+      createResourceProvider(type);
+    }
+
+    // ... then create the things needed to create the property providers ...
+    try {
+      hostMapping              = getHostMap();
+      gangliaCollectorHostName = getGangliaCollectorHost();
+    } catch (AmbariException e) {
+      // TODO ...
+    }
+
+    // ... then create all of the property providers
+    for (Resource.Type type : Resource.Type.values()){
+      createPropertyProviders(type);
+    }
+  }
+
+
+  // ----- ProviderModule ----------------------------------------------------
 
   @Override
   public ResourceProvider getResourceProvider(Resource.Type type) {
-    return new JDBCResourceProvider(DBHelper.CONNECTION_FACTORY, type,
-        PROPERTY_PROVIDERS, PropertyHelper.getPropertyIds(type, "DB"),
-        PropertyHelper.getKeyPropertyIds(type));
+    return resourceProviders.get(type);
   }
+
+  @Override
+  public List<PropertyProvider> getPropertyProviders(Resource.Type type) {
+    return propertyProviders.get(type);
+  }
+
+
+  // ----- utility methods ---------------------------------------------------
+
+  private void createResourceProvider(Resource.Type type) {
+    resourceProviders.put( type, new JDBCResourceProvider(DBHelper.CONNECTION_FACTORY, type,
+        PropertyHelper.getPropertyIds(type, "DB"),
+        PropertyHelper.getKeyPropertyIds(type)));
+  }
+
+  private void createPropertyProviders(Resource.Type type) {
+    List<PropertyProvider> providers = new LinkedList<PropertyProvider>();
+    if (type == Resource.Type.HostComponent) {
+      providers.add(new JMXPropertyProvider(
+          PropertyHelper.getPropertyIds(type, "JMX"),
+          new URLStreamProvider(),
+          hostMapping));
+
+      providers.add(new GangliaPropertyProvider(
+          PropertyHelper.getPropertyIds(type, "GANGLIA"),
+          new URLStreamProvider(),
+          gangliaCollectorHostName));
+    }
+    propertyProviders.put(type, providers);
+  }
+
+  public Map<String, String> getHostMap() throws AmbariException {
+    Map<String, String> hostMap      = new HashMap<String, String>();
+    ResourceProvider    hostProvider = getResourceProvider(Resource.Type.Host);
+    ObjectMapper mapper       = new ObjectMapper();
+    Request request      = PropertyHelper.getReadRequest(Collections.singleton(HOST_ATTRIBUTES_PROPERTY_ID));
+
+    Set<Resource> hosts = hostProvider.getResources(request, null);
+    for (Resource host : hosts) {
+      String attributes = host.getPropertyValue(HOST_ATTRIBUTES_PROPERTY_ID);
+      if (attributes != null && !attributes.startsWith("[]")) {
+        try {
+          Map<String, String> attributeMap = mapper.readValue(attributes, new TypeReference<Map<String, String>>() {});
+          hostMap.put(attributeMap.get("privateFQDN"), attributeMap.get("publicFQDN"));
+        } catch (IOException e) {
+          throw new IllegalStateException("Can't read hosts " + attributes, e);
+        }
+      }
+    }
+    return hostMap;
+  }
+
+  public String getGangliaCollectorHost() throws AmbariException {
+    ResourceProvider provider = getResourceProvider(Resource.Type.HostComponent);
+    Request          request  = PropertyHelper.getReadRequest(Collections.singleton(HOST_COMPONENT_HOST_NAME_PROPERTY_ID));
+
+    Predicate predicate = new PredicateBuilder().property(HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID).
+        equals("GANGLIA_MONITOR_SERVER").toPredicate();
+
+    Set<Resource> hostComponents = provider.getResources(request, predicate);
+    for (Resource hostComponent : hostComponents) {
+      String hostName = hostComponent.getPropertyValue(HOST_COMPONENT_HOST_NAME_PROPERTY_ID);
+      return hostMapping.get(hostName);
+    }
+
+    return null;
+  }
+
 }

+ 7 - 32
ambari-server/src/main/java/org/apache/ambari/server/controller/jdbc/JDBCResourceProvider.java

@@ -20,18 +20,15 @@ package org.apache.ambari.server.controller.jdbc;
 
 import org.apache.ambari.server.controller.internal.PropertyIdImpl;
 import org.apache.ambari.server.controller.internal.ResourceImpl;
-import org.apache.ambari.server.controller.internal.SchemaImpl;
-import org.apache.ambari.server.controller.utilities.PredicateHelper;
-import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.controller.predicate.BasePredicate;
 import org.apache.ambari.server.controller.predicate.PredicateVisitorAcceptor;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.PropertyId;
-import org.apache.ambari.server.controller.spi.PropertyProvider;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
-import org.apache.ambari.server.controller.spi.Schema;
+import org.apache.ambari.server.controller.utilities.PredicateHelper;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
 
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
@@ -41,7 +38,6 @@ import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
@@ -56,15 +52,10 @@ public class JDBCResourceProvider implements ResourceProvider {
 
   private final ConnectionFactory connectionFactory;
 
-  /**
-   * The list of property providers for this provider's resource type.
-   */
-  private final List<PropertyProvider> propertyProviders;
-
   /**
    * The schema for this provider's resource type.
    */
-  private final Schema schema;
+  private final Map<Resource.Type, PropertyId> keyPropertyIds;
 
   /**
    * Key mappings used for joins.
@@ -73,29 +64,18 @@ public class JDBCResourceProvider implements ResourceProvider {
 
   public JDBCResourceProvider(ConnectionFactory connectionFactory,
                               Resource.Type type,
-                              List<PropertyProvider> propertyProviders,
                               Set<PropertyId> propertyIds,
                               Map<Resource.Type, PropertyId> keyPropertyIds) {
     this.connectionFactory = connectionFactory;
     this.type = type;
-    this.propertyProviders = propertyProviders;
     this.propertyIds = propertyIds;
-    this.schema = new SchemaImpl(this, keyPropertyIds);
+    this.keyPropertyIds = keyPropertyIds;
   }
 
   @Override
   public Set<Resource> getResources(Request request, Predicate predicate) {
-
     Set<Resource> resources = new HashSet<Resource>();
-    Set<PropertyId> propertyIds = new HashSet<PropertyId>(request.getPropertyIds());
-    if (propertyIds.isEmpty()) {
-      propertyIds.addAll(this.propertyIds);
-    } else {
-      if (predicate != null) {
-        propertyIds.addAll(PredicateHelper.getPropertyIds(predicate));
-      }
-      propertyIds.retainAll(this.propertyIds);
-    }
+    Set<PropertyId> propertyIds = PropertyHelper.getRequestPropertyIds(this.propertyIds, request, predicate);
 
     try {
       Connection connection = connectionFactory.getConnection();
@@ -363,13 +343,8 @@ public class JDBCResourceProvider implements ResourceProvider {
   }
 
   @Override
-  public List<PropertyProvider> getPropertyProviders() {
-    return propertyProviders;
-  }
-
-  @Override
-  public Schema getSchema() {
-    return schema;
+  public Map<Resource.Type, PropertyId> getKeyPropertyIds() {
+    return keyPropertyIds;
   }
 
   /**

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/jmx/JMXMetrics.java → ambari-server/src/main/java/org/apache/ambari/server/controller/jmx/JMXMetricHolder.java

@@ -24,7 +24,7 @@ import java.util.Map;
 /**
  *
  */
-public class JMXMetrics {
+public class JMXMetricHolder {
 
   private List<Map<String, String>> beans;
 

+ 21 - 25
ambari-server/src/main/java/org/apache/ambari/server/controller/jmx/JMXPropertyProvider.java

@@ -45,13 +45,13 @@ public class JMXPropertyProvider implements PropertyProvider {
   private static final String CATEGORY_KEY = "tag.context";
 
   /**
-   * Map of property ids supported by this provider.
+   * Set of property ids supported by this provider.
    */
   private final Set<PropertyId> propertyIds;
 
   private final StreamProvider streamProvider;
 
-  private final HostMappingProvider mappingProvider;
+  private final Map<String, String> hostMapping;
 
   private static final Map<String, String> JMX_PORTS = new HashMap<String, String>();
 
@@ -71,14 +71,14 @@ public class JMXPropertyProvider implements PropertyProvider {
    *
    * @param propertyIds     the property ids provided by this provider
    * @param streamProvider  the stream provider
-   * @param mappingProvider the provider of host mapping information
+   * @param hostMapping     the host mapping
    */
   public JMXPropertyProvider(Set<PropertyId> propertyIds,
                               StreamProvider streamProvider,
-                              HostMappingProvider mappingProvider) {
-    this.propertyIds = propertyIds;
+                              Map<String, String> hostMapping) {
+    this.propertyIds    = propertyIds;
     this.streamProvider = streamProvider;
-    this.mappingProvider = mappingProvider;
+    this.hostMapping    = hostMapping;
   }
 
 
@@ -128,38 +128,34 @@ public class JMXPropertyProvider implements PropertyProvider {
 
     Set<PropertyId> ids = PropertyHelper.getRequestPropertyIds(getPropertyIds(), request, predicate);
 
-    Map<String, String> hosts = mappingProvider.getHostMap();
-
-    String hostName = hosts.get(PropertyHelper.fixHostName(resource.getPropertyValue(HOST_COMPONENT_HOST_NAME_PROPERTY_ID)));
-    String port = JMX_PORTS.get(resource.getPropertyValue(HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID));
+    String hostName = hostMapping.get(resource.getPropertyValue(HOST_COMPONENT_HOST_NAME_PROPERTY_ID));
+    String port     = JMX_PORTS.get(resource.getPropertyValue(HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID));
 
     if (hostName == null || port == null) {
       return true;
     }
 
-    JMXMetrics metrics;
-
     String spec = getSpec(hostName + ":" + port);
 
     try {
-      metrics = new ObjectMapper().readValue(streamProvider.readFrom(spec), JMXMetrics.class);
-    } catch (IOException e) {
-      throw new AmbariException("Can't get metrics : " + spec, e);
-    }
+      JMXMetricHolder metricHolder = new ObjectMapper().readValue(streamProvider.readFrom(spec), JMXMetricHolder.class);
+      for (Map<String, String> propertyMap : metricHolder.getBeans()) {
+        String category = propertyMap.get(CATEGORY_KEY);
+        if (category != null) {
+          for (Map.Entry<String, String> entry : propertyMap.entrySet()) {
 
-    for (Map<String, String> propertyMap : metrics.getBeans()) {
-      String category = propertyMap.get(CATEGORY_KEY);
-      if (category != null) {
-        for (Map.Entry<String, String> entry : propertyMap.entrySet()) {
+            PropertyId propertyId = PropertyHelper.getPropertyId(entry.getKey(), category);
 
-          PropertyId propertyId = PropertyHelper.getPropertyId(entry.getKey(), category);
-
-          if (ids.contains(propertyId)) {
-            resource.setProperty(propertyId, entry.getValue());
+            if (ids.contains(propertyId)) {
+              resource.setProperty(propertyId, entry.getValue());
+            }
           }
         }
       }
+    } catch (IOException e) {
+      throw new AmbariException("Can't get metrics : " + spec, e);
     }
+
     return true;
   }
 
@@ -170,7 +166,7 @@ public class JMXPropertyProvider implements PropertyProvider {
    *
    * @return the spec
    */
-  protected static String getSpec(String jmxSource) {
+  protected String getSpec(String jmxSource) {
     return "http://" + jmxSource + "/jmx?qry=Hadoop:*";
   }
 }

+ 4 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/spi/ProviderModule.java

@@ -18,6 +18,8 @@
 
 package org.apache.ambari.server.controller.spi;
 
+import java.util.List;
+
 /**
  *  Interface to allow the plugging in of resource adapters.
  */
@@ -30,4 +32,6 @@ public interface ProviderModule {
    * @return the resource adapter
    */
   public ResourceProvider getResourceProvider(Resource.Type type);
+
+  public List<PropertyProvider> getPropertyProviders(Resource.Type type);
 }

+ 3 - 3
ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Request.java

@@ -67,7 +67,7 @@ public interface Request {
      *
      * @return the start time in seconds
      */
-    public long getStartTime();
+    public Long getStartTime();
 
     /**
      * Get the end of the requested time range.  The time is given in
@@ -75,7 +75,7 @@ public interface Request {
      *
      * @return the end time in seconds
      */
-    public long getEndTime();
+    public Long getEndTime();
 
     /**
      * Get the requested time between each data point of the temporal
@@ -83,6 +83,6 @@ public interface Request {
      *
      * @return the step time in seconds
      */
-    public long getStep();
+    public Long getStep();
   }
 }

+ 2 - 16
ambari-server/src/main/java/org/apache/ambari/server/controller/spi/ResourceProvider.java

@@ -20,6 +20,7 @@ package org.apache.ambari.server.controller.spi;
 import org.apache.ambari.server.AmbariException;
 
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 
 /**
@@ -97,20 +98,5 @@ public interface ResourceProvider {
    */
   public Set<PropertyId> getPropertyIds();
 
-  /**
-   * Get the list of property providers for the resource associated with this provider's
-   * resource type.
-   *
-   * @return the list of property providers
-   */
-  public List<PropertyProvider> getPropertyProviders();
-
-  /**
-   * Get the {@link Schema schema} for this provider's resource type.  The schema
-   * for a given resource type describes the properties and categories provided
-   * by that type of resource.
-   *
-   * @return the schema object
-   */
-  public Schema getSchema();
+  public Map<Resource.Type, PropertyId> getKeyPropertyIds();
 }

+ 19 - 3
ambari-server/src/main/java/org/apache/ambari/server/controller/utilities/PropertyHelper.java

@@ -21,10 +21,8 @@ import org.apache.ambari.server.controller.internal.PropertyIdImpl;
 import org.apache.ambari.server.controller.internal.RequestImpl;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.PropertyId;
-import org.apache.ambari.server.controller.spi.PropertyProvider;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.Resource;
-import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.type.TypeReference;
 
@@ -32,6 +30,7 @@ import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
 
@@ -67,6 +66,13 @@ public class PropertyHelper {
     return KEY_PROPERTY_IDS.get(resourceType);
   }
 
+  /**
+   * Get a map of all the property values keyed by property id for the given resource.
+   *
+   * @param resource  the resource
+   *
+   * @return the map of properties for the given resource
+   */
   public static Map<PropertyId, String> getProperties(Resource resource) {
     Map<PropertyId, String> properties = new HashMap<PropertyId, String>();
 
@@ -93,11 +99,21 @@ public class PropertyHelper {
   public static Set<PropertyId> getRequestPropertyIds(Set<PropertyId> providerPropertyIds,
                                                       Request request,
                                                       Predicate predicate) {
-    Set<PropertyId> requestPropertyIds  = new HashSet<PropertyId>(request.getPropertyIds());
+    Set<PropertyId> propertyIds         = request.getPropertyIds();
+    Set<PropertyId> requestPropertyIds  = propertyIds == null ? null : new HashSet<PropertyId>(propertyIds);
 
     providerPropertyIds = new HashSet<PropertyId>(providerPropertyIds);
 
+    // if no properties are specified, then return them all
     if (requestPropertyIds == null || requestPropertyIds.isEmpty()) {
+//      // strip out the temporal properties, they must be asked for explicitly
+//      Iterator<PropertyId> iter = providerPropertyIds.iterator();
+//      while (iter.hasNext()) {
+//        PropertyId propertyId = iter.next();
+//        if (propertyId.isTemporal()) {
+//          iter.remove();
+//        }
+//      }
       return providerPropertyIds;
     }
 

+ 25 - 0
ambari-server/src/main/resources/properties.json

@@ -77,6 +77,11 @@
         "category":"Hosts",
         "temporal":false
       },
+      {
+        "name":"attributes",
+        "category":"Hosts",
+        "temporal":false
+      },
       {
         "name":"total_mem",
         "category":"Hosts",
@@ -413,6 +418,26 @@
 
     ],
     "GANGLIA":[
+      {
+        "name":"bytes_out",
+        "category":"network",
+        "temporal":true
+      },
+      {
+        "name":"bytes_in",
+        "category":"network",
+        "temporal":true
+      },
+      {
+        "name":"pkts_out",
+        "category":"network",
+        "temporal":true
+      },
+      {
+        "name":"pkts_in",
+        "category":"network",
+        "temporal":true
+      }
     ]
   }
 }

+ 1 - 1
ambari-server/src/test/java/org/apache/ambari/server/controller/ganglia/GangliaHelperTest.java

@@ -43,7 +43,7 @@ public class GangliaHelperTest {
 //        String api  = "rpcdetailed.rpcdetailed.sendHeartbeat_num_ops";
     String metric = "cpu_nice";
 
-    List<GangliaMetric> metrics = GangliaHelper.getGangliaMetrics(target, cluster, host, metric, startTime, endTime, step);
+//    List<GangliaMetric> metrics = GangliaHelper.getGangliaMetrics(target, cluster, host, metric, startTime, endTime, step);
 
     //TODO : assertions
   }

+ 36 - 15
ambari-server/src/test/java/org/apache/ambari/server/controller/ganglia/GangliaPropertyProviderTest.java

@@ -17,27 +17,48 @@
  */
 package org.apache.ambari.server.controller.ganglia;
 
-import org.junit.Ignore;
+import org.apache.ambari.server.controller.internal.ResourceImpl;
+import org.apache.ambari.server.controller.spi.PropertyId;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.junit.Assert;
 import org.junit.Test;
 
+import java.util.Collections;
+import java.util.Set;
+
 /**
- *
+ * Test the Ganglia property provider.
  */
 public class GangliaPropertyProviderTest {
 
-  @Ignore
+  private static final PropertyId PROPERTY_ID = PropertyHelper.getPropertyId("bytes_out", "network", true);
+
   @Test
-  public void testGet() throws Exception {
-//        String target  = "ec2-75-101-217-112.compute-1.amazonaws.com";
-//
-//        GangliaPropertyProvider provider = new GangliaPropertyProvider(target);
-//
-//        HostComponentId id = new HostComponentId("domU-12-31-39-0E-41-51.compute-1.internal", "HDPNameNode");
-//
-//        List<String> selectList = new LinkedList<String>();
-//        selectList.add("rpcdetailed.rpcdetailed.sendHeartbeat_num_ops");
-//
-//        provider.get(id, selectList);
-  }
+  public void testGetResources() throws Exception {
+    Set< PropertyId > propertyIds     = PropertyHelper.getPropertyIds(Resource.Type.HostComponent, "GANGLIA");
+    TestStreamProvider streamProvider  = new TestStreamProvider();
+
+    GangliaPropertyProvider propertyProvider = new GangliaPropertyProvider(propertyIds,
+        streamProvider,
+        "ec2-23-23-71-42.compute-1.amazonaws.com");
+
+    // namenode
+    Resource resource = new ResourceImpl(Resource.Type.HostComponent);
 
+    resource.setProperty(GangliaPropertyProvider.HOST_COMPONENT_HOST_NAME_PROPERTY_ID, "domU-12-31-39-0E-34-E1.compute-1.internal");
+    resource.setProperty(GangliaPropertyProvider.HOST_COMPONENT_COMPONENT_NAME_PROPERTY_ID, "NAMENODE");
+
+    // only ask for one property
+    Request  request = PropertyHelper.getReadRequest(Collections.singleton(PROPERTY_ID));
+
+    Assert.assertEquals(1, propertyProvider.populateResources(Collections.singleton(resource), request, null).size());
+
+    Assert.assertEquals("http://ec2-23-23-71-42.compute-1.amazonaws.com/ganglia/graph.php?c=HDPNameNode&h=domU-12-31-39-0E-34-E1.compute-1.internal&m=bytes_out&json=1",
+        streamProvider.getLastSpec());
+
+    Assert.assertEquals(3, PropertyHelper.getProperties(resource).size());
+    Assert.assertNotNull(resource.getPropertyValue(PROPERTY_ID));
+  }
 }

+ 18 - 7
ambari-server/src/main/java/org/apache/ambari/server/controller/jmx/HostMappingProvider.java → ambari-server/src/test/java/org/apache/ambari/server/controller/ganglia/TestStreamProvider.java

@@ -1,3 +1,5 @@
+package org.apache.ambari.server.controller.ganglia;
+
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -16,13 +18,22 @@
  * limitations under the License.
  */
 
-package org.apache.ambari.server.controller.jmx;
+import org.apache.ambari.server.controller.utilities.StreamProvider;
 
-import java.util.Map;
+import java.io.IOException;
+import java.io.InputStream;
 
-/**
- *
- */
-public interface HostMappingProvider {
-  public Map<String, String> getHostMap();
+public class TestStreamProvider implements StreamProvider {
+
+  private String lastSpec;
+
+  @Override
+  public InputStream readFrom(String spec) throws IOException {
+    lastSpec = spec;
+    return ClassLoader.getSystemResourceAsStream("temporal_ganglia.json");
+  }
+
+  public String getLastSpec() {
+    return lastSpec;
+  }
 }

+ 12 - 21
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterControllerImplTest.java

@@ -30,6 +30,7 @@ import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.controller.spi.Schema;
 import org.apache.ambari.server.controller.utilities.PredicateBuilder;
+import org.junit.Ignore;
 import org.junit.Test;
 
 import java.util.HashMap;
@@ -131,13 +132,8 @@ public class ClusterControllerImplTest {
     }
 
     @Override
-    public List<PropertyProvider> getPropertyProviders() {
-      return propertyProviders;
-    }
-
-    @Override
-    public Schema getSchema() {
-      return new SchemaImpl(this, keyPropertyIds);
+    public Map<Resource.Type, PropertyId> getKeyPropertyIds() {
+      return keyPropertyIds;
     }
   };
 
@@ -186,16 +182,13 @@ public class ClusterControllerImplTest {
     Assert.assertEquals(2, cnt);
   }
 
+  @Ignore
   @Test
   public void testGetSchema() {
     ProviderModule module = new TestProviderModule();
     ClusterController controller = new ClusterControllerImpl(module);
 
-    Assert.assertSame(module.getResourceProvider(Resource.Type.Host).getSchema(), controller.getSchema(Resource.Type.Host));
-    Assert.assertSame(module.getResourceProvider(Resource.Type.Service).getSchema(), controller.getSchema(Resource.Type.Service));
-    Assert.assertSame(module.getResourceProvider(Resource.Type.Cluster).getSchema(), controller.getSchema(Resource.Type.Cluster));
-    Assert.assertSame(module.getResourceProvider(Resource.Type.Component).getSchema(), controller.getSchema(Resource.Type.Component));
-    Assert.assertSame(module.getResourceProvider(Resource.Type.HostComponent).getSchema(), controller.getSchema(Resource.Type.HostComponent));
+//    Assert.assertEquals(, controller.getSchema(Resource.Type.Host));
   }
 
   private static class TestProviderModule implements ProviderModule {
@@ -213,11 +206,14 @@ public class ClusterControllerImplTest {
     public ResourceProvider getResourceProvider(Resource.Type type) {
       return providers.get(type);
     }
+
+    @Override
+    public List<PropertyProvider> getPropertyProviders(Resource.Type type) {
+      return propertyProviders;
+    }
   }
 
   private static class TestResourceProvider implements ResourceProvider {
-    private Schema schema = new SchemaImpl(this, keyPropertyIds);
-
     @Override
     public Set<Resource> getResources(Request request, Predicate predicate) {
 
@@ -257,13 +253,8 @@ public class ClusterControllerImplTest {
     }
 
     @Override
-    public List<PropertyProvider> getPropertyProviders() {
-      return propertyProviders;
-    }
-
-    @Override
-    public Schema getSchema() {
-      return schema;
+    public Map<Resource.Type, PropertyId> getKeyPropertyIds() {
+      return keyPropertyIds;
     }
   }
 

+ 0 - 9
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ResourceProviderImplTest.java

@@ -28,7 +28,6 @@ import org.apache.ambari.server.controller.ServiceResponse;
 import org.apache.ambari.server.controller.TrackActionResponse;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.PropertyId;
-import org.apache.ambari.server.controller.spi.PropertyProvider;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
@@ -46,7 +45,6 @@ import static org.easymock.EasyMock.verify;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -72,7 +70,6 @@ public class ResourceProviderImplTest {
 
     ResourceProvider provider = ResourceProviderImpl.getResourceProvider(
         type,
-        new LinkedList<PropertyProvider>(),
         PropertyHelper.getPropertyIds(type, "DB"),
         PropertyHelper.getKeyPropertyIds(type),
         managementController);
@@ -140,7 +137,6 @@ public class ResourceProviderImplTest {
 
     ResourceProvider provider = ResourceProviderImpl.getResourceProvider(
         type,
-        new LinkedList<PropertyProvider>(),
         PropertyHelper.getPropertyIds(type, "DB"),
         PropertyHelper.getKeyPropertyIds(type),
         managementController);
@@ -203,7 +199,6 @@ public class ResourceProviderImplTest {
 
     ResourceProvider provider = ResourceProviderImpl.getResourceProvider(
         type,
-        new LinkedList<PropertyProvider>(),
         PropertyHelper.getPropertyIds(type, "DB"),
         PropertyHelper.getKeyPropertyIds(type),
         managementController);
@@ -247,7 +242,6 @@ public class ResourceProviderImplTest {
 
     ResourceProvider provider = ResourceProviderImpl.getResourceProvider(
         type,
-        new LinkedList<PropertyProvider>(),
         PropertyHelper.getPropertyIds(type, "DB"),
         PropertyHelper.getKeyPropertyIds(type),
         managementController);
@@ -279,7 +273,6 @@ public class ResourceProviderImplTest {
 
     ResourceProvider provider = ResourceProviderImpl.getResourceProvider(
         type,
-        new LinkedList<PropertyProvider>(),
         PropertyHelper.getPropertyIds(type, "DB"),
         PropertyHelper.getKeyPropertyIds(type),
         managementController);
@@ -337,7 +330,6 @@ public class ResourceProviderImplTest {
 
     ResourceProvider provider = ResourceProviderImpl.getResourceProvider(
         type,
-        new LinkedList<PropertyProvider>(),
         PropertyHelper.getPropertyIds(type, "DB"),
         PropertyHelper.getKeyPropertyIds(type),
         managementController);
@@ -408,7 +400,6 @@ public class ResourceProviderImplTest {
 
     ResourceProvider provider = ResourceProviderImpl.getResourceProvider(
         type,
-        new LinkedList<PropertyProvider>(),
         PropertyHelper.getPropertyIds(type, "DB"),
         PropertyHelper.getKeyPropertyIds(type),
         managementController);

+ 4 - 9
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/SchemaImplTest.java

@@ -77,13 +77,8 @@ public class SchemaImplTest {
     }
 
     @Override
-    public List<PropertyProvider> getPropertyProviders() {
-      return propertyProviders;
-    }
-
-    @Override
-    public Schema getSchema() {
-      return null;
+    public Map<Resource.Type, PropertyId> getKeyPropertyIds() {
+      return keyPropertyIds;
     }
   };
 
@@ -124,7 +119,7 @@ public class SchemaImplTest {
 
   @Test
   public void testGetKeyPropertyId() {
-    Schema schema = new SchemaImpl(resourceProvider, keyPropertyIds);
+    Schema schema = new SchemaImpl(resourceProvider, propertyProviders);
 
     Assert.assertEquals(PropertyHelper.getPropertyId("p1", "c1"), schema.getKeyPropertyId(Resource.Type.Cluster));
     Assert.assertEquals(PropertyHelper.getPropertyId("p2", "c1"), schema.getKeyPropertyId(Resource.Type.Host));
@@ -133,7 +128,7 @@ public class SchemaImplTest {
 
   @Test
   public void testGetCategories() {
-    Schema schema = new SchemaImpl(resourceProvider, keyPropertyIds);
+    Schema schema = new SchemaImpl(resourceProvider, propertyProviders);
 
     Map<String, Set<String>> categories = schema.getCategories();
     Assert.assertEquals(4, categories.size());

+ 36 - 21
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/TestProviderModule.java

@@ -18,11 +18,10 @@
 
 package org.apache.ambari.server.controller.internal;
 
+import org.apache.ambari.server.controller.ganglia.GangliaPropertyProvider;
 import org.apache.ambari.server.controller.jdbc.TestJDBCResourceProvider;
 import org.apache.ambari.server.controller.jmx.JMXPropertyProvider;
 import org.apache.ambari.server.controller.jmx.TestHostMappingProvider;
-import org.apache.ambari.server.controller.jmx.TestStreamProvider;
-import org.apache.ambari.server.controller.spi.PropertyId;
 import org.apache.ambari.server.controller.spi.PropertyProvider;
 import org.apache.ambari.server.controller.spi.ProviderModule;
 import org.apache.ambari.server.controller.spi.Resource;
@@ -30,43 +29,59 @@ import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.controller.utilities.DBHelper;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 
-import java.util.Collections;
 import java.util.HashMap;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 /**
  * Module to plug in the JDBC resource provider.
  */
 public class TestProviderModule implements ProviderModule {
-
+  private static final Map<Resource.Type, ResourceProvider> resourceProviders = new HashMap<Resource.Type, ResourceProvider>();
   private static final Map<Resource.Type, List<PropertyProvider>> propertyProviders = new HashMap<Resource.Type, List<PropertyProvider>>();
 
   static {
 
-    Set< PropertyId > propertyIds           = PropertyHelper.getPropertyIds(Resource.Type.HostComponent, "JMX");
-    TestStreamProvider streamProvider       = new TestStreamProvider();
-    TestHostMappingProvider mappingProvider = new TestHostMappingProvider();
+    for (Resource.Type type : Resource.Type.values()) {
+      resourceProviders.put(type, new TestJDBCResourceProvider(
+          DBHelper.CONNECTION_FACTORY,
+          type,
+          PropertyHelper.getPropertyIds(type, "DB"),
+          PropertyHelper.getKeyPropertyIds(type)));
+    }
+
+    propertyProviders.put(Resource.Type.Cluster, new LinkedList<PropertyProvider>());
+    propertyProviders.put(Resource.Type.Service, new LinkedList<PropertyProvider>());
+    propertyProviders.put(Resource.Type.Component, new LinkedList<PropertyProvider>());
+    propertyProviders.put(Resource.Type.Host, new LinkedList<PropertyProvider>());
+
+    List<PropertyProvider> providers = new LinkedList<PropertyProvider>();
+    Map<String, String>    hostMap   = TestHostMappingProvider.getHostMap();
+
+    PropertyProvider propertyProvider = new JMXPropertyProvider(
+        PropertyHelper.getPropertyIds(Resource.Type.HostComponent, "JMX"),
+        new org.apache.ambari.server.controller.jmx.TestStreamProvider(),
+        hostMap);
+    providers.add(propertyProvider);
+
 
-    PropertyProvider propertyProvider = new JMXPropertyProvider(propertyIds,
-        streamProvider,
-        mappingProvider);
+    propertyProvider = new GangliaPropertyProvider(
+        PropertyHelper.getPropertyIds(Resource.Type.HostComponent, "GANGLIA"),
+        new org.apache.ambari.server.controller.ganglia.TestStreamProvider(),
+        "ec2-23-23-71-42.compute-1.amazonaws.com");
+    providers.add(propertyProvider);
 
-    propertyProviders.put(Resource.Type.HostComponent, Collections.singletonList(propertyProvider));
+    propertyProviders.put(Resource.Type.HostComponent, providers);
   }
 
   @Override
   public ResourceProvider getResourceProvider(Resource.Type type) {
+    return resourceProviders.get(type);
+  }
 
-    List<PropertyProvider> providers = propertyProviders.get(type);
-
-
-    return new TestJDBCResourceProvider(
-        DBHelper.CONNECTION_FACTORY,
-        type,
-        providers == null ? Collections.<PropertyProvider>emptyList() : providers,
-        PropertyHelper.getPropertyIds(type, "DB"),
-        PropertyHelper.getKeyPropertyIds(type));
+  @Override
+  public List<PropertyProvider> getPropertyProviders(Resource.Type type) {
+    return propertyProviders.get(type);
   }
 }

+ 2 - 9
ambari-server/src/test/java/org/apache/ambari/server/controller/jdbc/TestJDBCResourceProvider.java

@@ -19,10 +19,8 @@
 package org.apache.ambari.server.controller.jdbc;
 
 import org.apache.ambari.server.controller.spi.PropertyId;
-import org.apache.ambari.server.controller.spi.PropertyProvider;
 import org.apache.ambari.server.controller.spi.Resource;
 
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
@@ -30,12 +28,7 @@ import java.util.Set;
  *
  */
 public class TestJDBCResourceProvider extends JDBCResourceProvider{
-  public TestJDBCResourceProvider(ConnectionFactory connectionFactory, Resource.Type type, List<PropertyProvider> propertyProviders, Set<PropertyId> propertyIds, Map<Resource.Type, PropertyId> keyPropertyIds) {
-    super(connectionFactory, type, propertyProviders, propertyIds, keyPropertyIds);
-  }
-
-  @Override
-  public List<PropertyProvider> getPropertyProviders() {
-    return super.getPropertyProviders();
+  public TestJDBCResourceProvider(ConnectionFactory connectionFactory, Resource.Type type, Set<PropertyId> propertyIds, Map<Resource.Type, PropertyId> keyPropertyIds) {
+    super(connectionFactory, type, propertyIds, keyPropertyIds);
   }
 }

+ 9 - 9
ambari-server/src/test/java/org/apache/ambari/server/controller/jmx/JMXPropertyProviderTest.java

@@ -20,7 +20,6 @@ package org.apache.ambari.server.controller.jmx;
 
 import org.apache.ambari.server.controller.internal.ResourceImpl;
 import org.apache.ambari.server.controller.spi.PropertyId;
-import org.apache.ambari.server.controller.spi.PropertyProvider;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
@@ -28,6 +27,7 @@ import org.junit.Assert;
 import org.junit.Test;
 
 import java.util.Collections;
+import java.util.Map;
 import java.util.Set;
 
 /**
@@ -38,13 +38,13 @@ public class JMXPropertyProviderTest {
     @Test
   public void testGetResources() throws Exception {
 
-    Set< PropertyId >       propertyIds     = PropertyHelper.getPropertyIds(Resource.Type.HostComponent, "JMX");
-    TestStreamProvider      streamProvider  = new TestStreamProvider();
-    TestHostMappingProvider mappingProvider = new TestHostMappingProvider();
+    Set< PropertyId >   propertyIds    = PropertyHelper.getPropertyIds(Resource.Type.HostComponent, "JMX");
+    TestStreamProvider  streamProvider = new TestStreamProvider();
+    Map<String, String> hostMap        = TestHostMappingProvider.getHostMap();
 
-    PropertyProvider propertyProvider = new JMXPropertyProvider(propertyIds,
+    JMXPropertyProvider propertyProvider = new JMXPropertyProvider(propertyIds,
         streamProvider,
-        mappingProvider);
+        hostMap);
 
     // namenode
     Resource resource = new ResourceImpl(Resource.Type.HostComponent);
@@ -57,7 +57,7 @@ public class JMXPropertyProviderTest {
 
     Assert.assertEquals(1, propertyProvider.populateResources(Collections.singleton(resource), request, null).size());
 
-    Assert.assertEquals(JMXPropertyProvider.getSpec("ec2-50-17-129-192.compute-1.amazonaws.com:50070"), streamProvider.getLastSpec());
+    Assert.assertEquals(propertyProvider.getSpec("ec2-50-17-129-192.compute-1.amazonaws.com:50070"), streamProvider.getLastSpec());
 
     // see test/resources/hdfs_namenode_jmx.json for values
     Assert.assertEquals("1084287",  resource.getPropertyValue(PropertyHelper.getPropertyId("ReceivedBytes", "rpc")));
@@ -76,7 +76,7 @@ public class JMXPropertyProviderTest {
 
     propertyProvider.populateResources(Collections.singleton(resource), request, null);
 
-    Assert.assertEquals(JMXPropertyProvider.getSpec("ec2-23-23-71-42.compute-1.amazonaws.com:50075"), streamProvider.getLastSpec());
+    Assert.assertEquals(propertyProvider.getSpec("ec2-23-23-71-42.compute-1.amazonaws.com:50075"), streamProvider.getLastSpec());
 
     // see test/resources/hdfs_datanode_jmx.json for values
     Assert.assertEquals("0",  resource.getPropertyValue(PropertyHelper.getPropertyId("ReceivedBytes", "rpc")));
@@ -94,7 +94,7 @@ public class JMXPropertyProviderTest {
 
     propertyProvider.populateResources(Collections.singleton(resource), request, null);
 
-    Assert.assertEquals(JMXPropertyProvider.getSpec("ec2-23-23-71-42.compute-1.amazonaws.com:50030"), streamProvider.getLastSpec());
+    Assert.assertEquals(propertyProvider.getSpec("ec2-23-23-71-42.compute-1.amazonaws.com:50030"), streamProvider.getLastSpec());
 
     // see test/resources/mapreduce_jobtracker_jmx.json for values
     // resource should now contain 3 properties... host name, component name, and jvm.threadsWaiting (from request)

+ 4 - 3
ambari-server/src/test/java/org/apache/ambari/server/controller/jmx/TestHostMappingProvider.java

@@ -18,13 +18,15 @@
 
 package org.apache.ambari.server.controller.jmx;
 
+import org.apache.ambari.server.AmbariException;
+
 import java.util.HashMap;
 import java.util.Map;
 
 /**
  *
  */
-public class TestHostMappingProvider implements HostMappingProvider {
+public class TestHostMappingProvider {
 
   private static Map<String, String> HOST_MAPPING = new HashMap<String, String>();
 
@@ -35,8 +37,7 @@ public class TestHostMappingProvider implements HostMappingProvider {
     HOST_MAPPING.put("ip-10-110-157-51.ec2.internal",             "ec2-107-22-121-67.compute-1.amazonaws.com");
   }
 
-  @Override
-  public Map<String, String> getHostMap() {
+  public static Map<String, String> getHostMap() {
     return HOST_MAPPING;
   }
 }

+ 252 - 0
ambari-server/src/test/resources/temporal_ganglia.json

@@ -0,0 +1,252 @@
+[
+  {
+    "ds_name":"sum",
+    "cluster_name":"",
+    "graph_type":"stack",
+    "host_name":"",
+    "metric_name":"domU-12-31-39-0E-34-E1.compute-1.internal last hour   ",
+    "datapoints":[
+      [0, 1349899845],
+      [0, 1349899860],
+      [0, 1349899875],
+      [0, 1349899890],
+      [0, 1349899905],
+      [0, 1349899920],
+      [0, 1349899935],
+      [0, 1349899950],
+      [0, 1349899965],
+      [0, 1349899980],
+      [0, 1349899995],
+      [0, 1349900010],
+      [0, 1349900025],
+      [0, 1349900040],
+      [0, 1349900055],
+      [0, 1349900070],
+      [0, 1349900085],
+      [0, 1349900100],
+      [0, 1349900115],
+      [0, 1349900130],
+      [0, 1349900145],
+      [0, 1349900160],
+      [0, 1349900175],
+      [0, 1349900190],
+      [0, 1349900205],
+      [0, 1349900220],
+      [0, 1349900235],
+      [0, 1349900250],
+      [0, 1349900265],
+      [0, 1349900280],
+      [0, 1349900295],
+      [0, 1349900310],
+      [0, 1349900325],
+      [0, 1349900340],
+      [0, 1349900355],
+      [0, 1349900370],
+      [0, 1349900385],
+      [0, 1349900400],
+      [0, 1349900415],
+      [0, 1349900430],
+      [0, 1349900445],
+      [0, 1349900460],
+      [0, 1349900475],
+      [0, 1349900490],
+      [0, 1349900505],
+      [0, 1349900520],
+      [0, 1349900535],
+      [0, 1349900550],
+      [0, 1349900565],
+      [0, 1349900580],
+      [0, 1349900595],
+      [0, 1349900610],
+      [0, 1349900625],
+      [0, 1349900640],
+      [0, 1349900655],
+      [0, 1349900670],
+      [0, 1349900685],
+      [0, 1349900700],
+      [0, 1349900715],
+      [0, 1349900730],
+      [0, 1349900745],
+      [0, 1349900760],
+      [0, 1349900775],
+      [0, 1349900790],
+      [0, 1349900805],
+      [0, 1349900820],
+      [0, 1349900835],
+      [0, 1349900850],
+      [0, 1349900865],
+      [0, 1349900880],
+      [0, 1349900895],
+      [0, 1349900910],
+      [0, 1349900925],
+      [0, 1349900940],
+      [0, 1349900955],
+      [0, 1349900970],
+      [0, 1349900985],
+      [0, 1349901000],
+      [0, 1349901015],
+      [0, 1349901030],
+      [0, 1349901045],
+      [0, 1349901060],
+      [0, 1349901075],
+      [0, 1349901090],
+      [0, 1349901105],
+      [0, 1349901120],
+      [0, 1349901135],
+      [0, 1349901150],
+      [0, 1349901165],
+      [0, 1349901180],
+      [0, 1349901195],
+      [0, 1349901210],
+      [0, 1349901225],
+      [0, 1349901240],
+      [0, 1349901255],
+      [0, 1349901270],
+      [0, 1349901285],
+      [0, 1349901300],
+      [0, 1349901315],
+      [0, 1349901330],
+      [0, 1349901345],
+      [0, 1349901360],
+      [1.85333333, 1349901375],
+      [12.0466667, 1349901390],
+      [0, 1349901405],
+      [0, 1349901420],
+      [0, 1349901435],
+      [0, 1349901450],
+      [0, 1349901465],
+      [0, 1349901480],
+      [0, 1349901495],
+      [0, 1349901510],
+      [0, 1349901525],
+      [0, 1349901540],
+      [0, 1349901555],
+      [0, 1349901570],
+      [0, 1349901585],
+      [0, 1349901600],
+      [0, 1349901615],
+      [0, 1349901630],
+      [0, 1349901645],
+      [0, 1349901660],
+      [0, 1349901675],
+      [0, 1349901690],
+      [0, 1349901705],
+      [0, 1349901720],
+      [0, 1349901735],
+      [0, 1349901750],
+      [0, 1349901765],
+      [0, 1349901780],
+      [0, 1349901795],
+      [0, 1349901810],
+      [0, 1349901825],
+      [0, 1349901840],
+      [0, 1349901855],
+      [0, 1349901870],
+      [0, 1349901885],
+      [0, 1349901900],
+      [0, 1349901915],
+      [0, 1349901930],
+      [0, 1349901945],
+      [0, 1349901960],
+      [0, 1349901975],
+      [0, 1349901990],
+      [0, 1349902005],
+      [0, 1349902020],
+      [0, 1349902035],
+      [0, 1349902050],
+      [0, 1349902065],
+      [0, 1349902080],
+      [0, 1349902095],
+      [0, 1349902110],
+      [0, 1349902125],
+      [0, 1349902140],
+      [0, 1349902155],
+      [0, 1349902170],
+      [0, 1349902185],
+      [0, 1349902200],
+      [0, 1349902215],
+      [0, 1349902230],
+      [0, 1349902245],
+      [0, 1349902260],
+      [0, 1349902275],
+      [0, 1349902290],
+      [0, 1349902305],
+      [0, 1349902320],
+      [0, 1349902335],
+      [0, 1349902350],
+      [0, 1349902365],
+      [0, 1349902380],
+      [0, 1349902395],
+      [0, 1349902410],
+      [0, 1349902425],
+      [0, 1349902440],
+      [0, 1349902455],
+      [0, 1349902470],
+      [0, 1349902485],
+      [0, 1349902500],
+      [0, 1349902515],
+      [0, 1349902530],
+      [0, 1349902545],
+      [0, 1349902560],
+      [0, 1349902575],
+      [0, 1349902590],
+      [0, 1349902605],
+      [0, 1349902620],
+      [0, 1349902635],
+      [0, 1349902650],
+      [0, 1349902665],
+      [0, 1349902680],
+      [0, 1349902695],
+      [0, 1349902710],
+      [0, 1349902725],
+      [0, 1349902740],
+      [0, 1349902755],
+      [0, 1349902770],
+      [0, 1349902785],
+      [0, 1349902800],
+      [0, 1349902815],
+      [0, 1349902830],
+      [0, 1349902845],
+      [0, 1349902860],
+      [0, 1349902875],
+      [0, 1349902890],
+      [0, 1349902905],
+      [0, 1349902920],
+      [0, 1349902935],
+      [0, 1349902950],
+      [0, 1349902965],
+      [0, 1349902980],
+      [0, 1349902995],
+      [0, 1349903010],
+      [0, 1349903025],
+      [0, 1349903040],
+      [0, 1349903055],
+      [0, 1349903070],
+      [0, 1349903085],
+      [0, 1349903100],
+      [0, 1349903115],
+      [0, 1349903130],
+      [0, 1349903145],
+      [0, 1349903160],
+      [0, 1349903175],
+      [0, 1349903190],
+      [0, 1349903205],
+      [0, 1349903220],
+      [0, 1349903235],
+      [0, 1349903250],
+      [0, 1349903265],
+      [0, 1349903280],
+      [0, 1349903295],
+      [0, 1349903310],
+      [0, 1349903325],
+      [0, 1349903340],
+      [0, 1349903355],
+      [0, 1349903370],
+      [0, 1349903385],
+      [0, 1349903400],
+      [0, 1349903415],
+      [0, 1349903430],
+      [0, 1349903445]
+    ]
+  }
+]