AMBARI-8973 Add precision option support to Ambari Metrics API (dsen)

Dmytro Sen 10 years ago
parent
commit
e7e865dee2
20 files changed with 768 additions and 193 deletions
  1. +4 -4
      ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
  2. +46 -2
      ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
  3. +103 -9
      ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixTransactSQL.java
  4. +37 -0
      ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/Precision.java
  5. +1 -1
      ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricAggregator.java
  6. +1 -1
      ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricClusterAggregator.java
  7. +4 -2
      ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricClusterAggregatorHourly.java
  8. +3 -2
      ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricStore.java
  9. +15 -4
      ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
  10. +25 -1
      ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
  11. +21 -100
      ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITClusterAggregator.java
  12. +6 -54
      ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITMetricAggregator.java
  13. +271 -0
      ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITPhoenixHBaseAccessor.java
  14. +96 -0
      ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricTestHelper.java
  15. +1 -1
      ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestClusterSuite.java
  16. +126 -7
      ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
  17. +2 -2
      ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
  18. +1 -1
      ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java
  19. +3 -1
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
  20. +2 -1
      ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java

+ 4 - 4
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java

@@ -113,11 +113,11 @@ public class HBaseTimelineMetricStore extends AbstractService
   @Override
   public TimelineMetrics getTimelineMetrics(List<String> metricNames,
       String hostname, String applicationId, String instanceId,
-      Long startTime, Long endTime, Integer limit,
+      Long startTime, Long endTime, Precision precision, Integer limit,
       boolean groupedByHosts) throws SQLException, IOException {

     Condition condition = new LikeCondition(metricNames, hostname, applicationId,
-      instanceId, startTime, endTime, limit, groupedByHosts);
+      instanceId, startTime, endTime, precision, limit, groupedByHosts);

     if (hostname == null) {
       return hBaseAccessor.getAggregateMetricRecords(condition);
@@ -129,12 +129,12 @@ public class HBaseTimelineMetricStore extends AbstractService
   @Override
   public TimelineMetric getTimelineMetric(String metricName, String hostname,
       String applicationId, String instanceId, Long startTime,
-      Long endTime, Integer limit)
+      Long endTime, Precision precision, Integer limit)
       throws SQLException, IOException {

     TimelineMetrics metrics = hBaseAccessor.getMetricRecords(
       new LikeCondition(Collections.singletonList(metricName), hostname,
-        applicationId, instanceId, startTime, endTime, limit, true)
+        applicationId, instanceId, startTime, endTime, precision, limit, true)
     );

     TimelineMetric metric = new TimelineMetric();
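
With the new parameter in place, callers thread the requested precision through to the store. A minimal sketch of the updated call, assuming an already wired-up store instance (the metric name, hostname, and appId values are illustrative, not taken from this commit):

    // Illustrative only: fetch one metric at HOURS precision through the
    // signature added by this commit; "disk_used", the hostname and "HOST"
    // are placeholder values.
    TimelineMetric metric = timelineMetricStore.getTimelineMetric(
        "disk_used",              // metricName
        "host1.example.com",      // hostname
        "HOST",                   // applicationId
        null,                     // instanceId
        startTime, endTime,       // query window, epoch milliseconds
        Precision.HOURS,          // new: read from the hourly aggregate table
        null);                    // limit: keep the default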

+ 46 - 2
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java

@@ -181,6 +181,23 @@ public class PhoenixHBaseAccessor {
     return metric;
   }

+  static TimelineMetric getAggregatedTimelineMetricFromResultSet(ResultSet rs)
+      throws SQLException, IOException {
+    TimelineMetric metric = new TimelineMetric();
+    metric.setMetricName(rs.getString("METRIC_NAME"));
+    metric.setHostName(rs.getString("HOSTNAME"));
+    metric.setAppId(rs.getString("APP_ID"));
+    metric.setInstanceId(rs.getString("INSTANCE_ID"));
+    metric.setTimestamp(rs.getLong("SERVER_TIME"));
+    metric.setStartTime(rs.getLong("SERVER_TIME"));
+    metric.setType(rs.getString("UNITS"));
+    Map<Long, Double> valueMap = new HashMap<Long, Double>();
+    valueMap.put(rs.getLong("SERVER_TIME"),
+        rs.getDouble("METRIC_SUM") / rs.getInt("METRIC_COUNT"));
+    metric.setMetricValues(valueMap);
+    return metric;
+  }
+
   private static Map<Long, Double> readLastMetricValueFromJSON(String json)
     throws IOException {
     Map<Long, Double> values = readMetricFromJSON(json);
@@ -413,7 +430,13 @@ public class PhoenixHBaseAccessor {

         ResultSet rs = stmt.executeQuery();
         while (rs.next()) {
-          TimelineMetric metric = getTimelineMetricFromResultSet(rs);
+          TimelineMetric metric;
+          if (condition.getPrecision() == Precision.HOURS
+              || condition.getPrecision() == Precision.MINUTES) {
+            metric = getAggregatedTimelineMetricFromResultSet(rs);
+          } else {
+            metric = getTimelineMetricFromResultSet(rs);
+          }

           if (condition.isGrouped()) {
             metrics.addOrMergeTimelineMetric(metric);
@@ -491,7 +514,12 @@ public class PhoenixHBaseAccessor {

         ResultSet rs = stmt.executeQuery();
         while (rs.next()) {
-          TimelineMetric metric = getAggregateTimelineMetricFromResultSet(rs);
+          TimelineMetric metric;
+          if (condition.getPrecision() == Precision.HOURS) {
+            metric = getAggregateHoursTimelineMetricFromResultSet(rs);
+          } else {
+            metric = getAggregateTimelineMetricFromResultSet(rs);
+          }

           if (condition.isGrouped()) {
             metrics.addOrMergeTimelineMetric(metric);
@@ -560,6 +588,22 @@ public class PhoenixHBaseAccessor {
     return metric;
   }

+  private TimelineMetric getAggregateHoursTimelineMetricFromResultSet(
+    ResultSet rs) throws SQLException {
+    TimelineMetric metric = new TimelineMetric();
+    metric.setMetricName(rs.getString("METRIC_NAME"));
+    metric.setAppId(rs.getString("APP_ID"));
+    metric.setInstanceId(rs.getString("INSTANCE_ID"));
+    metric.setTimestamp(rs.getLong("SERVER_TIME"));
+    metric.setStartTime(rs.getLong("SERVER_TIME"));
+    Map<Long, Double> valueMap = new HashMap<Long, Double>();
+    valueMap.put(rs.getLong("SERVER_TIME"),
+        rs.getDouble("METRIC_SUM") / rs.getInt("METRIC_COUNT"));
+    metric.setMetricValues(valueMap);
+
+    return metric;
+  }
+
   private void verifyCondition(Condition condition) throws SQLException {
     if (condition.isEmpty()) {
       throw new SQLException("No filter criteria specified.");
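
Both new mappers surface a single datapoint per aggregate row: the mean of the aggregated samples, METRIC_SUM / METRIC_COUNT, keyed by the row's SERVER_TIME. A standalone restatement of that reduction, with a hypothetical helper name:

    // Hypothetical helper, not part of this commit: an aggregate row stores
    // a running sum and a sample count, and the value exposed to clients is
    // their quotient (the mean over the aggregation interval).
    static double aggregatedValue(double metricSum, int metricCount) {
      // Assumes metricCount > 0 for any persisted aggregate row.
      return metricSum / metricCount;
    }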

+ 103 - 9
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixTransactSQL.java

@@ -192,7 +192,17 @@ public class PhoenixTransactSQL {
     "HOSTS_COUNT, " +
     "METRIC_MAX, " +
     "METRIC_MIN " +
-    "FROM METRIC_AGGREGATE";
+    "FROM %s";
+
+  public static final String GET_CLUSTER_AGGREGATE_HOURLY_SQL = "SELECT %s " +
+      "METRIC_NAME, APP_ID, " +
+      "INSTANCE_ID, SERVER_TIME, " +
+      "UNITS, " +
+      "METRIC_SUM, " +
+      "METRIC_COUNT, " +
+      "METRIC_MAX, " +
+      "METRIC_MIN " +
+      "FROM %s";

   public static final String METRICS_RECORD_TABLE_NAME = "METRIC_RECORD";
   public static final String METRICS_AGGREGATE_MINUTE_TABLE_NAME =
@@ -206,6 +216,8 @@
   public static final String DEFAULT_TABLE_COMPRESSION = "SNAPPY";
   public static final String DEFAULT_ENCODING = "FAST_DIFF";
   public static final long NATIVE_TIME_RANGE_DELTA = 120000; // 2 minutes
+  public static final long HOUR = 3600000; // 1 hour
+  public static final long DAY = 86400000; // 1 day

   /**
    * Filter to optimize HBase scan by using file timestamps. This prevents
@@ -227,9 +239,43 @@
     if (condition.getStatement() != null) {
       stmtStr = condition.getStatement();
     } else {
-      stmtStr = String.format(GET_METRIC_SQL,
+
+      String metricsTable;
+      String query;
+      if (condition.getPrecision() == null) {
+        Long timeRange = condition.getEndTime() - condition.getStartTime();
+        if (timeRange > DAY) {
+          metricsTable = METRICS_AGGREGATE_HOURLY_TABLE_NAME;
+          query = GET_METRIC_AGGREGATE_ONLY_SQL;
+          condition.setPrecision(Precision.HOURS);
+        } else if (timeRange > HOUR) {
+          metricsTable = METRICS_AGGREGATE_MINUTE_TABLE_NAME;
+          query = GET_METRIC_AGGREGATE_ONLY_SQL;
+          condition.setPrecision(Precision.MINUTES);
+        } else {
+          metricsTable = METRICS_RECORD_TABLE_NAME;
+          query = GET_METRIC_SQL;
+          condition.setPrecision(Precision.SECONDS);
+        }
+      } else {
+        switch (condition.getPrecision()) {
+          case HOURS:
+            metricsTable = METRICS_AGGREGATE_HOURLY_TABLE_NAME;
+            query = GET_METRIC_AGGREGATE_ONLY_SQL;
+            break;
+          case MINUTES:
+            metricsTable = METRICS_AGGREGATE_MINUTE_TABLE_NAME;
+            query = GET_METRIC_AGGREGATE_ONLY_SQL;
+            break;
+          default:
+            metricsTable = METRICS_RECORD_TABLE_NAME;
+            query = GET_METRIC_SQL;
+        }
+      }
+
+      stmtStr = String.format(query,
         getNaiveTimeRangeHint(condition.getStartTime(), NATIVE_TIME_RANGE_DELTA),
-        METRICS_RECORD_TABLE_NAME);
+        metricsTable);
     }

     StringBuilder sb = new StringBuilder(stmtStr);
@@ -378,7 +424,32 @@
       throw new IllegalArgumentException("Condition is empty.");
     }

-    StringBuilder sb = new StringBuilder(GET_CLUSTER_AGGREGATE_SQL);
+    String metricsAggregateTable;
+    String queryStmt;
+    if (condition.getPrecision() == null) {
+      Long timeRange = condition.getEndTime() - condition.getStartTime();
+      if (timeRange > DAY) {
+        metricsAggregateTable = METRICS_CLUSTER_AGGREGATE_HOURLY_TABLE_NAME;
+        queryStmt = GET_CLUSTER_AGGREGATE_HOURLY_SQL;
+        condition.setPrecision(Precision.HOURS);
+      } else {
+        metricsAggregateTable = METRICS_CLUSTER_AGGREGATE_TABLE_NAME;
+        queryStmt = GET_CLUSTER_AGGREGATE_SQL;
+        condition.setPrecision(Precision.SECONDS);
+      }
+    } else {
+      switch (condition.getPrecision()) {
+        case HOURS:
+          metricsAggregateTable = METRICS_CLUSTER_AGGREGATE_HOURLY_TABLE_NAME;
+          queryStmt = GET_CLUSTER_AGGREGATE_HOURLY_SQL;
+          break;
+        default:
+          metricsAggregateTable = METRICS_CLUSTER_AGGREGATE_TABLE_NAME;
+          queryStmt = GET_CLUSTER_AGGREGATE_SQL;
+      }
+    }
+
+    StringBuilder sb = new StringBuilder(queryStmt);
     sb.append(" WHERE ");
     sb.append(condition.getConditionClause());
     sb.append(" ORDER BY METRIC_NAME, SERVER_TIME");
@@ -388,7 +459,7 @@

     String query = String.format(sb.toString(),
       PhoenixTransactSQL.getNaiveTimeRangeHint(condition.getStartTime(),
-        NATIVE_TIME_RANGE_DELTA));
+        NATIVE_TIME_RANGE_DELTA), metricsAggregateTable);
     if (LOG.isDebugEnabled()) {
       LOG.debug("SQL => " + query + ", condition => " + condition);
     }
@@ -482,6 +553,8 @@
     boolean isGrouped();
     void setStatement(String statement);
     String getHostname();
+    Precision getPrecision();
+    void setPrecision(Precision precision);
     String getAppId();
     String getInstanceId();
     String getConditionClause();
@@ -503,6 +576,7 @@
     String instanceId;
     Long startTime;
     Long endTime;
+    Precision precision;
     Integer limit;
     boolean grouped;
     boolean noLimit = false;
@@ -511,14 +585,15 @@
     Set<String> orderByColumns = new LinkedHashSet<String>();

     DefaultCondition(List<String> metricNames, String hostname, String appId,
-              String instanceId, Long startTime, Long endTime, Integer limit,
-              boolean grouped) {
+              String instanceId, Long startTime, Long endTime, Precision precision,
+              Integer limit, boolean grouped) {
       this.metricNames = metricNames;
       this.hostname = hostname;
       this.appId = appId;
       this.instanceId = instanceId;
       this.startTime = startTime;
       this.endTime = endTime;
+      this.precision = precision;
       this.limit = limit;
       this.grouped = grouped;
     }
@@ -591,6 +666,14 @@
     public String getHostname() {
       return hostname == null || hostname.isEmpty() ? null : hostname;
     }
+
+    public Precision getPrecision() {
+      return precision;
+    }
+
+    public void setPrecision(Precision precision) {
+      this.precision = precision;
+    }

     public String getAppId() {
       if (appId != null && !appId.isEmpty()) {
@@ -708,9 +791,10 @@

     LikeCondition(List<String> metricNames, String hostname,
                   String appId, String instanceId, Long startTime,
-                  Long endTime, Integer limit, boolean grouped) {
+                  Long endTime, Precision precision, Integer limit,
+                  boolean grouped) {
       super(metricNames, hostname, appId, instanceId, startTime, endTime,
-        limit, grouped);
+        precision, limit, grouped);
     }

     @Override
@@ -779,6 +863,16 @@
       return adaptee.getHostname();
     }

+    @Override
+    public Precision getPrecision() {
+      return adaptee.getPrecision();
+    }
+
+    @Override
+    public void setPrecision(Precision precision) {
+      adaptee.setPrecision(precision);
+    }
+
     @Override
     public String getAppId() {
       return adaptee.getAppId();
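
A consequence of the branches above: when the client sends no precision, the query window picks one, and the chosen value is written back into the condition so that PhoenixHBaseAccessor later selects the matching result-set mapper. A condensed restatement of the host-metric rule, using the constants defined in this file:

    // Defaulting rule for host metrics when condition.getPrecision() == null:
    //   window > DAY  (86400000 ms) -> METRICS_AGGREGATE_HOURLY_TABLE_NAME, Precision.HOURS
    //   window > HOUR (3600000 ms)  -> METRICS_AGGREGATE_MINUTE_TABLE_NAME, Precision.MINUTES
    //   otherwise                   -> METRICS_RECORD_TABLE_NAME,           Precision.SECONDS
    // e.g. a six-hour window resolves to the minute-level aggregate table:
    long window = 6 * HOUR;                 // 21600000 ms
    assert window > HOUR && window <= DAY;  // -> Precision.MINUTES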

+ 37 - 0
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/Precision.java

@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
+
+/**
+ * Is used to determine metrics aggregate table.
+ *
+ * @see org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.TimelineWebServices#getTimelineMetric
+ * @see org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.TimelineWebServices#getTimelineMetrics
+ */
+public enum Precision {
+  SECONDS,
+  MINUTES,
+  HOURS;
+
+  public static Precision getPrecision(String precision) throws IllegalArgumentException {
+    if (precision == null ) {
+      return null;
+    }
+    return Precision.valueOf(precision.toUpperCase());
+  }
+}
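
getPrecision is a thin wrapper over Enum.valueOf, so matching is case-insensitive and a null argument simply means "no precision requested". A short usage sketch:

    Precision.getPrecision(null);      // null: caller falls back to the
                                       // time-range based default
    Precision.getPrecision("hours");   // Precision.HOURS
    Precision.getPrecision("Minutes"); // Precision.MINUTES
    Precision.getPrecision("days");    // throws IllegalArgumentException,
                                       // surfaced as HTTP 400 by the web layer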

+ 1 - 1
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricAggregator.java

@@ -80,7 +80,7 @@ public class TimelineMetricAggregator extends AbstractTimelineAggregator {
   @Override
   protected Condition prepareMetricQueryCondition(long startTime, long endTime) {
     Condition condition = new DefaultCondition(null, null, null, null, startTime,
-      endTime, null, true);
+      endTime, null, null, true);
     condition.setNoLimit();
     condition.setFetchSize(resultsetFetchSize);
     condition.setStatement(String.format(GET_METRIC_AGGREGATE_ONLY_SQL,

+ 1 - 1
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricClusterAggregator.java

@@ -93,7 +93,7 @@ public class TimelineMetricClusterAggregator extends AbstractTimelineAggregator
   @Override
   protected Condition prepareMetricQueryCondition(long startTime, long endTime) {
     Condition condition = new DefaultCondition(null, null, null, null, startTime,
-      endTime, null, true);
+      endTime, null, null, true);
     condition.setNoLimit();
     condition.setFetchSize(resultsetFetchSize);
     condition.setStatement(String.format(GET_METRIC_SQL,

+ 4 - 2
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricClusterAggregatorHourly.java

@@ -34,6 +34,7 @@ import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DefaultCondition;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.GET_CLUSTER_AGGREGATE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_CLUSTER_AGGREGATE_HOURLY_TABLE_NAME;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_CLUSTER_AGGREGATE_TABLE_NAME;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.CLUSTER_AGGREGATOR_HOUR_CHECKPOINT_CUTOFF_INTERVAL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.CLUSTER_AGGREGATOR_HOUR_CHECKPOINT_CUTOFF_MULTIPLIER;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.CLUSTER_AGGREGATOR_HOUR_DISABLED;
@@ -91,11 +92,12 @@ public class TimelineMetricClusterAggregatorHourly extends
   protected Condition prepareMetricQueryCondition(long startTime,
                                                   long endTime) {
     Condition condition = new DefaultCondition(null, null, null, null, startTime,
-      endTime, null, true);
+      endTime, null, null, true);
     condition.setNoLimit();
     condition.setFetchSize(resultsetFetchSize);
     condition.setStatement(String.format(GET_CLUSTER_AGGREGATE_SQL,
-      PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA)));
+      PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA),
+        METRICS_CLUSTER_AGGREGATE_TABLE_NAME));
     condition.addOrderByColumn("METRIC_NAME");
     condition.addOrderByColumn("APP_ID");
     condition.addOrderByColumn("INSTANCE_ID");
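
Since GET_CLUSTER_AGGREGATE_SQL now carries two %s placeholders (the Phoenix scan hint and the source table), every String.format call site must supply both, which is what the change above does:

    // The template's first %s is the time-range scan hint, the second the
    // source table; METRICS_CLUSTER_AGGREGATE_TABLE_NAME restores the table
    // that was previously hardcoded as METRIC_AGGREGATE.
    String sql = String.format(GET_CLUSTER_AGGREGATE_SQL,
        PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA),
        METRICS_CLUSTER_AGGREGATE_TABLE_NAME);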

+ 3 - 2
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricStore.java

@@ -34,6 +34,7 @@ public interface TimelineMetricStore {
    * @param instanceId Application instance id.
    * @param startTime Start timestamp
    * @param endTime End timestamp
+   * @param precision Precision [ seconds, minutes, hours ]
    * @param limit Override default result limit
    * @param groupedByHosts Group {@link TimelineMetric} by metric name, hostname,
    *                app id and instance id
@@ -43,7 +44,7 @@
    */
   TimelineMetrics getTimelineMetrics(List<String> metricNames, String hostname,
       String applicationId, String instanceId, Long startTime,
-      Long endTime, Integer limit, boolean groupedByHosts)
+      Long endTime, Precision precision, Integer limit, boolean groupedByHosts)
     throws SQLException, IOException;


@@ -53,7 +54,7 @@
    */
   TimelineMetric getTimelineMetric(String metricName, String hostname,
       String applicationId, String instanceId, Long startTime,
-      Long endTime, Integer limit)
+      Long endTime, Precision precision, Integer limit)
       throws SQLException, IOException;


+ 15 - 4
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java

@@ -30,6 +30,7 @@ import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.Precision;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.EntityIdentifier;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper;
@@ -295,17 +296,20 @@ public class TimelineWebServices {
     @QueryParam("hostname") String hostname,
     @QueryParam("startTime") String startTime,
     @QueryParam("endTime") String endTime,
+    @QueryParam("precision") String precision,
     @QueryParam("limit") String limit
   ) {
     init(res);
     try {
       return timelineMetricStore.getTimelineMetric(metricName, hostname,
         appId, instanceId, parseLongStr(startTime), parseLongStr(endTime),
-        parseIntStr(limit));
-
+        Precision.getPrecision(precision), parseIntStr(limit));
     } catch (NumberFormatException ne) {
       throw new BadRequestException("startTime and limit should be numeric " +
         "values");
+    } catch (IllegalArgumentException iae) {
+      throw new BadRequestException("precision should be seconds, minutes " +
+        "or hours");
     } catch (SQLException sql) {
       throw new WebApplicationException(sql,
         Response.Status.INTERNAL_SERVER_ERROR);
@@ -325,6 +329,7 @@
    * @param instanceId Application instance id.
    * @param hostname Hostname where the metrics originated.
    * @param startTime Start time for the metric records retrieved.
+   * @param precision Precision [ seconds, minutes, hours ]
    * @param limit limit on total number of {@link TimelineMetric} records
    *              retrieved.
    * @return {@link TimelineMetrics}
@@ -341,6 +346,7 @@
     @QueryParam("hostname") String hostname,
     @QueryParam("startTime") String startTime,
     @QueryParam("endTime") String endTime,
+    @QueryParam("precision") String precision,
     @QueryParam("limit") String limit,
     @QueryParam("grouped") String grouped
   ) {
@@ -349,16 +355,21 @@
       LOG.debug("Request for metrics => metricNames: " + metricNames + ", " +
         "appId: " + appId + ", instanceId: " + instanceId + ", " +
         "hostname: " + hostname + ", startTime: " + startTime + ", " +
-        "endTime: " + endTime);
+        "endTime: " + endTime + ", " +
+        "precision: " + precision);

       return timelineMetricStore.getTimelineMetrics(
         parseListStr(metricNames, ","), hostname, appId, instanceId,
-        parseLongStr(startTime), parseLongStr(endTime), parseIntStr(limit),
+        parseLongStr(startTime), parseLongStr(endTime),
+        Precision.getPrecision(precision), parseIntStr(limit),
         parseBoolean(grouped));

     } catch (NumberFormatException ne) {
       throw new BadRequestException("startTime and limit should be numeric " +
         "values");
+    } catch (IllegalArgumentException iae) {
+      throw new BadRequestException("precision should be seconds, minutes " +
+        "or hours");
     } catch (SQLException sql) {
       throw new WebApplicationException(sql,
         Response.Status.INTERNAL_SERVER_ERROR);
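
At the REST layer the option surfaces as a new precision query parameter accepting seconds, minutes, or hours; any other value is rejected with a 400. A sketch of a request (the /ws/v1/timeline mount point and the parameter values are assumed for illustration, not shown in this diff):

    GET /ws/v1/timeline/metrics?metricNames=disk_used&appId=HOST&startTime=1420070400000&endTime=1420243200000&precision=hours

With a two-day window like this one, hours is also what the store would infer if the parameter were omitted.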

+ 25 - 1
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;

-import com.google.common.collect.Maps;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.phoenix.hbase.index.write.IndexWriterUtils;
 import org.apache.phoenix.query.BaseTest;
@@ -36,6 +36,8 @@ import java.sql.Statement;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
+
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.LOG;
 import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.assertj.core.api.Assertions.assertThat;
@@ -110,4 +112,26 @@ public abstract class AbstractMiniHBaseClusterTest extends BaseTest {
     conn.close();
   }

+  protected PhoenixHBaseAccessor createTestableHBaseAccessor() {
+    Configuration metricsConf = new Configuration();
+    metricsConf.set(
+        TimelineMetricConfiguration.HBASE_COMPRESSION_SCHEME, "NONE");
+
+    return
+        new PhoenixHBaseAccessor(
+            new Configuration(),
+            metricsConf,
+            new ConnectionProvider() {
+              @Override
+              public Connection getConnection() {
+                Connection connection = null;
+                try {
+                  connection = DriverManager.getConnection(getUrl());
+                } catch (SQLException e) {
+                  LOG.warn("Unable to connect to HBase store using Phoenix.", e);
+                }
+                return connection;
+              }
+            });
+  }
 }

+ 21 - 100
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITClusterAggregator.java

@@ -19,19 +19,15 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline


 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;

 import java.sql.Connection;
-import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
-import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;

@@ -40,8 +36,10 @@ import static junit.framework.Assert.fail;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DefaultCondition;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.GET_CLUSTER_AGGREGATE_SQL;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.LOG;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_CLUSTER_AGGREGATE_TABLE_NAME;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.NATIVE_TIME_RANGE_DELTA;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.MetricTestHelper.prepareSingleTimelineMetric;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.MetricTestHelper.createEmptyTimelineClusterMetric;

 public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
   private Connection conn;
@@ -97,9 +95,10 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {

     //THEN
     Condition condition = new DefaultCondition(null, null, null, null, startTime,
-      endTime, null, true);
+      endTime, null, null, true);
     condition.setStatement(String.format(GET_CLUSTER_AGGREGATE_SQL,
-      PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA)));
+      PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA),
+      METRICS_CLUSTER_AGGREGATE_TABLE_NAME));

     PreparedStatement pstmt = PhoenixTransactSQL.prepareGetMetricsSqlStmt
       (conn, condition);
@@ -157,9 +156,10 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {

     //THEN
     Condition condition = new DefaultCondition(null, null, null, null, startTime,
-      endTime, null, true);
+      endTime, null, null, true);
     condition.setStatement(String.format(GET_CLUSTER_AGGREGATE_SQL,
-      PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA)));
+      PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA),
+      METRICS_CLUSTER_AGGREGATE_TABLE_NAME));

     PreparedStatement pstmt = PhoenixTransactSQL.prepareGetMetricsSqlStmt
       (conn, condition);
@@ -205,13 +205,13 @@
     Map<TimelineClusterMetric, MetricClusterAggregate> records =
       new HashMap<TimelineClusterMetric, MetricClusterAggregate>();

-    records.put(createEmptyTimelineMetric(ctime),
+    records.put(createEmptyTimelineClusterMetric(ctime),
       new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-    records.put(createEmptyTimelineMetric(ctime += minute),
+    records.put(createEmptyTimelineClusterMetric(ctime += minute),
       new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-    records.put(createEmptyTimelineMetric(ctime += minute),
+    records.put(createEmptyTimelineClusterMetric(ctime += minute),
       new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-    records.put(createEmptyTimelineMetric(ctime += minute),
+    records.put(createEmptyTimelineClusterMetric(ctime += minute),
       new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));

     hdb.saveClusterAggregateRecords(records);
@@ -249,24 +249,24 @@
     Map<TimelineClusterMetric, MetricClusterAggregate> records =
       new HashMap<TimelineClusterMetric, MetricClusterAggregate>();

-    records.put(createEmptyTimelineMetric("disk_used", ctime),
+    records.put(createEmptyTimelineClusterMetric("disk_used", ctime),
       new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-    records.put(createEmptyTimelineMetric("disk_free", ctime),
+    records.put(createEmptyTimelineClusterMetric("disk_free", ctime),
       new MetricClusterAggregate(1.0, 2, 0.0, 1.0, 1.0));

-    records.put(createEmptyTimelineMetric("disk_used", ctime += minute),
+    records.put(createEmptyTimelineClusterMetric("disk_used", ctime += minute),
       new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-    records.put(createEmptyTimelineMetric("disk_free", ctime),
+    records.put(createEmptyTimelineClusterMetric("disk_free", ctime),
       new MetricClusterAggregate(1.0, 2, 0.0, 1.0, 1.0));

-    records.put(createEmptyTimelineMetric("disk_used", ctime += minute),
+    records.put(createEmptyTimelineClusterMetric("disk_used", ctime += minute),
       new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-    records.put(createEmptyTimelineMetric("disk_free", ctime),
+    records.put(createEmptyTimelineClusterMetric("disk_free", ctime),
       new MetricClusterAggregate(1.0, 2, 0.0, 1.0, 1.0));

-    records.put(createEmptyTimelineMetric("disk_used", ctime += minute),
+    records.put(createEmptyTimelineClusterMetric("disk_used", ctime += minute),
       new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-    records.put(createEmptyTimelineMetric("disk_free", ctime),
+    records.put(createEmptyTimelineClusterMetric("disk_free", ctime),
       new MetricClusterAggregate(1.0, 2, 0.0, 1.0, 1.0));

     hdb.saveClusterAggregateRecords(records);
@@ -303,83 +303,4 @@
     Statement stmt = conn.createStatement();
     return stmt.executeQuery(query);
   }
-
-  private TimelineClusterMetric createEmptyTimelineMetric(String name,
-                                                          long startTime) {
-    TimelineClusterMetric metric = new TimelineClusterMetric(name,
-      "test_app", null, startTime, null);
-
-    return metric;
-  }
-
-  private TimelineClusterMetric createEmptyTimelineMetric(long startTime) {
-    return createEmptyTimelineMetric("disk_used", startTime);
-  }
-
-  private MetricHostAggregate
-  createMetricHostAggregate(double max, double min, int numberOfSamples,
-                            double sum) {
-    MetricHostAggregate expectedAggregate =
-      new MetricHostAggregate();
-    expectedAggregate.setMax(max);
-    expectedAggregate.setMin(min);
-    expectedAggregate.setNumberOfSamples(numberOfSamples);
-    expectedAggregate.setSum(sum);
-
-    return expectedAggregate;
-  }
-
-  private PhoenixHBaseAccessor createTestableHBaseAccessor() {
-    Configuration metricsConf = new Configuration();
-    metricsConf.set(
-      TimelineMetricConfiguration.HBASE_COMPRESSION_SCHEME, "NONE");
-
-    return
-      new PhoenixHBaseAccessor(
-        new Configuration(),
-        metricsConf,
-        new ConnectionProvider() {
-          @Override
-          public Connection getConnection() {
-            Connection connection = null;
-            try {
-              connection = DriverManager.getConnection(getUrl());
-            } catch (SQLException e) {
-              LOG.warn("Unable to connect to HBase store using Phoenix.", e);
-            }
-            return connection;
-          }
-        });
-  }
-
-  private TimelineMetrics prepareSingleTimelineMetric(long startTime,
-                                                      String host,
-                                                      String metricName,
-                                                      double val) {
-    TimelineMetrics m = new TimelineMetrics();
-    m.setMetrics(Arrays.asList(
-      createTimelineMetric(startTime, metricName, host, val)));
-
-    return m;
-  }
-
-  private TimelineMetric createTimelineMetric(long startTime,
-                                              String metricName,
-                                              String host,
-                                              double val) {
-    TimelineMetric m = new TimelineMetric();
-    m.setAppId("host");
-    m.setHostName(host);
-    m.setMetricName(metricName);
-    m.setStartTime(startTime);
-    Map<Long, Double> vals = new HashMap<Long, Double>();
-    vals.put(startTime + 15000l, val);
-    vals.put(startTime + 30000l, val);
-    vals.put(startTime + 45000l, val);
-    vals.put(startTime + 60000l, val);
-
-    m.setMetricValues(vals);
-
-    return m;
-  }
 }

+ 6 - 54
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITMetricAggregator.java

@@ -25,10 +25,8 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.Test;
 
 
 import java.sql.Connection;
 import java.sql.Connection;
-import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.ResultSet;
-import java.sql.SQLException;
 import java.sql.Statement;
 import java.sql.Statement;
 import java.util.Arrays;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.Comparator;
@@ -41,11 +39,11 @@ import static junit.framework.Assert.fail;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DefaultCondition;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DefaultCondition;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.GET_METRIC_AGGREGATE_ONLY_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.GET_METRIC_AGGREGATE_ONLY_SQL;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.LOG;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_HOURLY_TABLE_NAME;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_HOURLY_TABLE_NAME;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_MINUTE_TABLE_NAME;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_MINUTE_TABLE_NAME;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.NATIVE_TIME_RANGE_DELTA;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.NATIVE_TIME_RANGE_DELTA;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.MetricTestHelper.createEmptyTimelineMetric;
 
 
 public class ITMetricAggregator extends AbstractMiniHBaseClusterTest {
 public class ITMetricAggregator extends AbstractMiniHBaseClusterTest {
   private Connection conn;
   private Connection conn;
@@ -86,7 +84,7 @@ public class ITMetricAggregator extends AbstractMiniHBaseClusterTest {
     hdb.insertMetricRecords(metricsSent);
     hdb.insertMetricRecords(metricsSent);
 
 
     Condition queryCondition = new DefaultCondition(null, "local", null, null,
     Condition queryCondition = new DefaultCondition(null, "local", null, null,
-      startTime, startTime + (15 * 60 * 1000), null, false);
+      startTime, startTime + (15 * 60 * 1000), null, null, false);
     TimelineMetrics recordRead = hdb.getMetricRecords(queryCondition);
     TimelineMetrics recordRead = hdb.getMetricRecords(queryCondition);
 
 
     // THEN
     // THEN
@@ -122,7 +120,7 @@ public class ITMetricAggregator extends AbstractMiniHBaseClusterTest {
 
 
     //THEN
     //THEN
     Condition condition = new DefaultCondition(null, null, null, null, startTime,
     Condition condition = new DefaultCondition(null, null, null, null, startTime,
-      endTime, null, true);
+      endTime, null, null, true);
     condition.setStatement(String.format(GET_METRIC_AGGREGATE_ONLY_SQL,
     condition.setStatement(String.format(GET_METRIC_AGGREGATE_ONLY_SQL,
       PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA),
       PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA),
       METRICS_AGGREGATE_MINUTE_TABLE_NAME));
       METRICS_AGGREGATE_MINUTE_TABLE_NAME));
@@ -131,7 +129,7 @@ public class ITMetricAggregator extends AbstractMiniHBaseClusterTest {
       (conn, condition);
       (conn, condition);
     ResultSet rs = pstmt.executeQuery();
     ResultSet rs = pstmt.executeQuery();
     MetricHostAggregate expectedAggregate =
     MetricHostAggregate expectedAggregate =
-      createMetricHostAggregate(2.0, 0.0, 20, 15.0);
+      MetricTestHelper.createMetricHostAggregate(2.0, 0.0, 20, 15.0);
 
 
     int count = 0;
     int count = 0;
     while (rs.next()) {
     while (rs.next()) {
@@ -172,7 +170,7 @@ public class ITMetricAggregator extends AbstractMiniHBaseClusterTest {
     long startTime = System.currentTimeMillis();
     long startTime = System.currentTimeMillis();
 
 
     MetricHostAggregate expectedAggregate =
     MetricHostAggregate expectedAggregate =
-      createMetricHostAggregate(2.0, 0.0, 20, 15.0);
+        MetricTestHelper.createMetricHostAggregate(2.0, 0.0, 20, 15.0);
     Map<TimelineMetric, MetricHostAggregate>
     Map<TimelineMetric, MetricHostAggregate>
       aggMap = new HashMap<TimelineMetric,
       aggMap = new HashMap<TimelineMetric,
       MetricHostAggregate>();
       MetricHostAggregate>();
@@ -201,7 +199,7 @@ public class ITMetricAggregator extends AbstractMiniHBaseClusterTest {
 
 
     //THEN
     //THEN
     Condition condition = new DefaultCondition(null, null, null, null, startTime,
     Condition condition = new DefaultCondition(null, null, null, null, startTime,
-      endTime, null, true);
+      endTime, null, null, true);
     condition.setStatement(String.format(GET_METRIC_AGGREGATE_ONLY_SQL,
     condition.setStatement(String.format(GET_METRIC_AGGREGATE_ONLY_SQL,
       PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA),
       PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA),
       METRICS_AGGREGATE_HOURLY_TABLE_NAME));
       METRICS_AGGREGATE_HOURLY_TABLE_NAME));
@@ -226,52 +224,6 @@ public class ITMetricAggregator extends AbstractMiniHBaseClusterTest {
     }
     }
   }
   }
 
 
-  private TimelineMetric createEmptyTimelineMetric(long startTime) {
-    TimelineMetric metric = new TimelineMetric();
-    metric.setMetricName("disk_used");
-    metric.setAppId("test_app");
-    metric.setHostName("test_host");
-    metric.setTimestamp(startTime);
-
-    return metric;
-  }
-
-  private MetricHostAggregate
-  createMetricHostAggregate(double max, double min, int numberOfSamples,
-                            double sum) {
-    MetricHostAggregate expectedAggregate =
-      new MetricHostAggregate();
-    expectedAggregate.setMax(max);
-    expectedAggregate.setMin(min);
-    expectedAggregate.setNumberOfSamples(numberOfSamples);
-    expectedAggregate.setSum(sum);
-
-    return expectedAggregate;
-  }
-
-  private PhoenixHBaseAccessor createTestableHBaseAccessor() {
-    Configuration metricsConf = new Configuration();
-    metricsConf.set(
-      TimelineMetricConfiguration.HBASE_COMPRESSION_SCHEME, "NONE");
-
-    return
-      new PhoenixHBaseAccessor(
-        new Configuration(),
-        metricsConf,
-        new ConnectionProvider() {
-          @Override
-          public Connection getConnection() {
-            Connection connection = null;
-            try {
-              connection = DriverManager.getConnection(getUrl());
-            } catch (SQLException e) {
-              LOG.warn("Unable to connect to HBase store using Phoenix.", e);
-            }
-            return connection;
-          }
-        });
-  }
-
   private final static Comparator<TimelineMetric> TIME_IGNORING_COMPARATOR =
     new Comparator<TimelineMetric>() {
       @Override

+ 271 - 0
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITPhoenixHBaseAccessor.java

@@ -0,0 +1,271 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertTrue;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_MINUTE_TABLE_NAME;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.MetricTestHelper.prepareSingleTimelineMetric;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.MetricTestHelper.createEmptyTimelineMetric;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.MetricTestHelper.createMetricHostAggregate;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.MetricTestHelper.createEmptyTimelineClusterMetric;
+
+
+public class ITPhoenixHBaseAccessor extends AbstractMiniHBaseClusterTest {
+  private Connection conn;
+  private PhoenixHBaseAccessor hdb;
+
+  @Before
+  public void setUp() throws Exception {
+    hdb = createTestableHBaseAccessor();
+    // inits connection, starts mini cluster
+    conn = getConnection(getUrl());
+
+    hdb.initMetricSchema();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    Connection conn = getConnection(getUrl());
+    Statement stmt = conn.createStatement();
+
+    stmt.execute("delete from METRIC_AGGREGATE");
+    stmt.execute("delete from METRIC_AGGREGATE_HOURLY");
+    stmt.execute("delete from METRIC_RECORD");
+    stmt.execute("delete from METRIC_RECORD_HOURLY");
+    stmt.execute("delete from METRIC_RECORD_MINUTE");
+    conn.commit();
+
+    stmt.close();
+    conn.close();
+  }
+
+  @Test
+  public void testGetMetricRecordsSeconds() throws IOException, SQLException {
+    // GIVEN
+    long startTime = System.currentTimeMillis();
+    long ctime = startTime;
+    long minute = 60 * 1000;
+    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local1",
+        "disk_free", 1));
+    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local2",
+        "disk_free", 2));
+    ctime += minute;
+    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local1",
+        "disk_free", 2));
+    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local2",
+        "disk_free", 1));
+
+    // WHEN
+    long endTime = ctime + minute;
+    PhoenixTransactSQL.Condition condition = new PhoenixTransactSQL.DefaultCondition(
+        Collections.singletonList("disk_free"), "local1", null, null, startTime,
+        endTime, Precision.SECONDS, null, true);
+    TimelineMetrics timelineMetrics = hdb.getMetricRecords(condition);
+
+    //THEN
+    assertEquals(1, timelineMetrics.getMetrics().size());
+    TimelineMetric metric = timelineMetrics.getMetrics().get(0);
+
+    assertEquals("disk_free", metric.getMetricName());
+    assertEquals("local1", metric.getHostName());
+    assertEquals(8, metric.getMetricValues().size());
+  }
+
+  @Test
+  public void testGetMetricRecordsMinutes() throws IOException, SQLException {
+    // GIVEN
+    TimelineMetricAggregator aggregatorMinute = TimelineMetricAggregatorFactory
+        .createTimelineMetricAggregatorMinute(hdb, new Configuration());
+
+    long startTime = System.currentTimeMillis();
+    long ctime = startTime;
+    long minute = 60 * 1000;
+    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local1",
+        "disk_free", 1));
+    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime + minute, "local1",
+        "disk_free", 2));
+    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local2",
+        "disk_free", 2));
+    long endTime = ctime + minute;
+    boolean success = aggregatorMinute.doWork(startTime, endTime);
+    assertTrue(success);
+
+    // WHEN
+    PhoenixTransactSQL.Condition condition = new PhoenixTransactSQL.DefaultCondition(
+        Collections.singletonList("disk_free"), "local1", null, null, startTime,
+        endTime, Precision.MINUTES, null, false);
+    TimelineMetrics timelineMetrics = hdb.getMetricRecords(condition);
+
+    //THEN
+    assertEquals(1, timelineMetrics.getMetrics().size());
+    TimelineMetric metric = timelineMetrics.getMetrics().get(0);
+
+    assertEquals("disk_free", metric.getMetricName());
+    assertEquals("local1", metric.getHostName());
+    assertEquals(1, metric.getMetricValues().size());
+    Iterator<Map.Entry<Long, Double>> iterator = metric.getMetricValues().entrySet().iterator();
+    assertEquals(1.5, iterator.next().getValue(), 0.00001);
+  }
+
+  @Test
+  public void testGetMetricRecordsHours() throws IOException, SQLException {
+    // GIVEN
+    TimelineMetricAggregator aggregator = TimelineMetricAggregatorFactory
+        .createTimelineMetricAggregatorHourly(hdb, new Configuration());
+
+    MetricHostAggregate expectedAggregate =
+        createMetricHostAggregate(2.0, 0.0, 20, 15.0);
+    Map<TimelineMetric, MetricHostAggregate>
+        aggMap = new HashMap<TimelineMetric,
+        MetricHostAggregate>();
+
+    long startTime = System.currentTimeMillis();
+    int min_5 = 5 * 60 * 1000;
+    long ctime = startTime - min_5;
+    aggMap.put(createEmptyTimelineMetric(ctime += min_5), expectedAggregate);
+    aggMap.put(createEmptyTimelineMetric(ctime += min_5), expectedAggregate);
+    aggMap.put(createEmptyTimelineMetric(ctime += min_5), expectedAggregate);
+    aggMap.put(createEmptyTimelineMetric(ctime += min_5), expectedAggregate);
+    aggMap.put(createEmptyTimelineMetric(ctime += min_5), expectedAggregate);
+    aggMap.put(createEmptyTimelineMetric(ctime += min_5), expectedAggregate);
+    aggMap.put(createEmptyTimelineMetric(ctime += min_5), expectedAggregate);
+    aggMap.put(createEmptyTimelineMetric(ctime += min_5), expectedAggregate);
+    aggMap.put(createEmptyTimelineMetric(ctime += min_5), expectedAggregate);
+    aggMap.put(createEmptyTimelineMetric(ctime += min_5), expectedAggregate);
+    aggMap.put(createEmptyTimelineMetric(ctime += min_5), expectedAggregate);
+    aggMap.put(createEmptyTimelineMetric(ctime += min_5), expectedAggregate);
+
+    hdb.saveHostAggregateRecords(aggMap, METRICS_AGGREGATE_MINUTE_TABLE_NAME);
+    long endTime = ctime + min_5;
+    boolean success = aggregator.doWork(startTime, endTime);
+    assertTrue(success);
+
+    // WHEN
+    PhoenixTransactSQL.Condition condition = new PhoenixTransactSQL.DefaultCondition(
+        Collections.singletonList("disk_used"), "test_host", "test_app", null,
+        startTime, endTime, Precision.HOURS, null, true);
+    TimelineMetrics timelineMetrics = hdb.getMetricRecords(condition);
+
+    //THEN
+    assertEquals(1, timelineMetrics.getMetrics().size());
+    TimelineMetric metric = timelineMetrics.getMetrics().get(0);
+
+    assertEquals("disk_used", metric.getMetricName());
+    assertEquals("test_host", metric.getHostName());
+    assertEquals(1, metric.getMetricValues().size());
+    Iterator<Map.Entry<Long, Double>> iterator = metric.getMetricValues().entrySet().iterator();
+    assertEquals(0.75, iterator.next().getValue(), 0.00001);
+  }
+
+  @Test
+  public void testGetClusterMetricRecordsSeconds() throws Exception {
+    // GIVEN
+    TimelineMetricClusterAggregator agg =
+        new TimelineMetricClusterAggregator(hdb, new Configuration());
+
+    long startTime = System.currentTimeMillis();
+    long ctime = startTime + 1;
+    long minute = 60 * 1000;
+    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local1",
+        "disk_free", 1));
+    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local2",
+        "disk_free", 2));
+    ctime += minute;
+    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local1",
+        "disk_free", 2));
+    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local2",
+        "disk_free", 1));
+
+    long endTime = ctime + minute + 1;
+    boolean success = agg.doWork(startTime, endTime);
+    assertTrue(success);
+
+    // WHEN
+    PhoenixTransactSQL.Condition condition = new PhoenixTransactSQL.DefaultCondition(
+        Collections.singletonList("disk_free"), null, null, null,
+        startTime, endTime, Precision.SECONDS, null, true);
+    TimelineMetrics timelineMetrics = hdb.getAggregateMetricRecords(condition);
+
+    //THEN
+    assertEquals(1, timelineMetrics.getMetrics().size());
+    TimelineMetric metric = timelineMetrics.getMetrics().get(0);
+
+    assertEquals("disk_free", metric.getMetricName());
+    assertEquals(8, metric.getMetricValues().size());
+    assertEquals(1.5, metric.getMetricValues().values().iterator().next(), 0.00001);
+  }
+
+  @Test
+  public void testGetClusterMetricRecordsHours() throws Exception {
+    // GIVEN
+    TimelineMetricClusterAggregatorHourly agg =
+        new TimelineMetricClusterAggregatorHourly(hdb, new Configuration());
+
+    long startTime = System.currentTimeMillis();
+    long ctime = startTime;
+    long minute = 60 * 1000;
+
+    Map<TimelineClusterMetric, MetricClusterAggregate> records =
+        new HashMap<TimelineClusterMetric, MetricClusterAggregate>();
+
+    records.put(createEmptyTimelineClusterMetric(ctime),
+        new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
+    records.put(createEmptyTimelineClusterMetric(ctime += minute),
+        new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
+    records.put(createEmptyTimelineClusterMetric(ctime += minute),
+        new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
+    records.put(createEmptyTimelineClusterMetric(ctime += minute),
+        new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
+
+    hdb.saveClusterAggregateRecords(records);
+    boolean success = agg.doWork(startTime, ctime + minute);
+    assertTrue(success);
+
+    // WHEN
+    PhoenixTransactSQL.Condition condition = new PhoenixTransactSQL.DefaultCondition(
+        Collections.singletonList("disk_used"), null, null, null,
+        startTime, ctime + minute, Precision.HOURS, null, true);
+    TimelineMetrics timelineMetrics = hdb.getAggregateMetricRecords(condition);
+
+    // THEN
+    assertEquals(1, timelineMetrics.getMetrics().size());
+    TimelineMetric metric = timelineMetrics.getMetrics().get(0);
+
+    assertEquals("disk_used", metric.getMetricName());
+    assertEquals("test_app", metric.getAppId());
+    assertEquals(1, metric.getMetricValues().size());
+    assertEquals(2.0, metric.getMetricValues().values().iterator().next(), 0.00001);
+  }
+}
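
The integration tests above drive the new precision-aware read path end to end. As a minimal sketch of the same call pattern outside the test harness (assuming an already-initialized PhoenixHBaseAccessor named accessor and the imports used in the test file; variable names here are illustrative only):

    // Query the last hour of "disk_free" for one host at minute granularity.
    long endTime = System.currentTimeMillis();
    long startTime = endTime - 60 * 60 * 1000;
    PhoenixTransactSQL.Condition condition = new PhoenixTransactSQL.DefaultCondition(
        Collections.singletonList("disk_free"),  // metric names
        "local1",                                // hostname
        null, null,                              // appId, instanceId
        startTime, endTime,
        Precision.MINUTES,                       // new argument: read from METRIC_RECORD_MINUTE
        null,                                    // limit
        false);                                  // grouped by host
    TimelineMetrics result = accessor.getMetricRecords(condition);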

+ 96 - 0
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricTestHelper.java

@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
+
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
+public class MetricTestHelper {
+
+  public static MetricHostAggregate
+  createMetricHostAggregate(double max, double min, int numberOfSamples,
+                            double sum) {
+    MetricHostAggregate expectedAggregate =
+        new MetricHostAggregate();
+    expectedAggregate.setMax(max);
+    expectedAggregate.setMin(min);
+    expectedAggregate.setNumberOfSamples(numberOfSamples);
+    expectedAggregate.setSum(sum);
+
+    return expectedAggregate;
+  }
+
+  public static TimelineMetrics prepareSingleTimelineMetric(long startTime,
+                                                        String host,
+                                                        String metricName,
+                                                        double val) {
+    TimelineMetrics m = new TimelineMetrics();
+    m.setMetrics(Arrays.asList(
+        createTimelineMetric(startTime, metricName, host, val)));
+
+    return m;
+  }
+
+
+  public static TimelineMetric createTimelineMetric(long startTime,
+                                                String metricName,
+                                                String host,
+                                                double val) {
+    TimelineMetric m = new TimelineMetric();
+    m.setAppId("host");
+    m.setHostName(host);
+    m.setMetricName(metricName);
+    m.setStartTime(startTime);
+    Map<Long, Double> vals = new HashMap<Long, Double>();
+    vals.put(startTime + 15000L, val);
+    vals.put(startTime + 30000L, val);
+    vals.put(startTime + 45000L, val);
+    vals.put(startTime + 60000L, val);
+
+    m.setMetricValues(vals);
+
+    return m;
+  }
+
+  public static TimelineMetric createEmptyTimelineMetric(long startTime) {
+    TimelineMetric metric = new TimelineMetric();
+    metric.setMetricName("disk_used");
+    metric.setAppId("test_app");
+    metric.setHostName("test_host");
+    metric.setTimestamp(startTime);
+
+    return metric;
+  }
+
+  public static TimelineClusterMetric createEmptyTimelineClusterMetric(
+      String name, long startTime) {
+    TimelineClusterMetric metric = new TimelineClusterMetric(name,
+        "test_app", null, startTime, null);
+
+    return metric;
+  }
+
+  public static TimelineClusterMetric createEmptyTimelineClusterMetric(
+      long startTime) {
+    return createEmptyTimelineClusterMetric("disk_used", startTime);
+  }
+}
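
These helpers replace the private fixtures removed from ITMetricAggregator. A typical use, mirroring the tests above (hdb is the accessor under test, as in setUp()):

    // One TimelineMetric for host "local1" with four 15-second samples of value 1.0
    TimelineMetrics metrics = MetricTestHelper.prepareSingleTimelineMetric(
        System.currentTimeMillis(), "local1", "disk_free", 1);
    hdb.insertMetricRecords(metrics);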

+ 1 - 1
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestClusterSuite.java

@@ -24,7 +24,7 @@ import org.junit.runners.Suite;
 import static org.junit.runners.Suite.SuiteClasses;

 @RunWith(Suite.class)
-@SuiteClasses({ITMetricAggregator.class, ITClusterAggregator.class})
+@SuiteClasses({ITMetricAggregator.class, ITClusterAggregator.class, ITPhoenixHBaseAccessor.class})
 public class TestClusterSuite {

 }

+ 126 - 7
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java

@@ -17,9 +17,13 @@
  */
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;

+import org.easymock.Capture;
 import org.junit.Assert;
 import org.junit.Test;

+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.Collections;

@@ -28,12 +32,19 @@ import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.LikeCondition;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.SplitByMetricNamesCondition;

+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+
+import org.easymock.EasyMock;
+
 public class TestPhoenixTransactSQL {
   @Test
   public void testConditionClause() throws Exception {
     Condition condition = new DefaultCondition(
       Arrays.asList("cpu_user", "mem_free"), "h1", "a1", "i1",
-        1407959718L, 1407959918L, null, false);
+        1407959718L, 1407959918L, null, null, false);

     String preparedClause = condition.getConditionClause();
     String expectedClause = "METRIC_NAME IN (?, ?) AND HOSTNAME = ? AND " +
@@ -47,7 +58,7 @@ public class TestPhoenixTransactSQL {
   public void testSplitByMetricNamesCondition() throws Exception {
     Condition c = new DefaultCondition(
       Arrays.asList("cpu_user", "mem_free"), "h1", "a1", "i1",
-      1407959718L, 1407959918L, null, false);
+      1407959718L, 1407959918L, null, null, false);

     SplitByMetricNamesCondition condition = new SplitByMetricNamesCondition(c);
     condition.setCurrentMetric(c.getMetricNames().get(0));
@@ -64,7 +75,7 @@ public class TestPhoenixTransactSQL {
   public void testLikeConditionClause() throws Exception {
     Condition condition = new LikeCondition(
         Arrays.asList("cpu_user", "mem_free"), "h1", "a1", "i1",
-        1407959718L, 1407959918L, null, false);
+        1407959718L, 1407959918L, null, null, false);

     String preparedClause = condition.getConditionClause();
     String expectedClause = "(METRIC_NAME LIKE ? OR METRIC_NAME LIKE ?) AND HOSTNAME = ? AND " +
@@ -76,7 +87,7 @@ public class TestPhoenixTransactSQL {

     condition = new LikeCondition(
         Collections.<String>emptyList(), "h1", "a1", "i1",
-        1407959718L, 1407959918L, null, false);
+        1407959718L, 1407959918L, null, null, false);

     preparedClause = condition.getConditionClause();
     expectedClause = " HOSTNAME = ? AND " +
@@ -88,7 +99,7 @@ public class TestPhoenixTransactSQL {

     condition = new LikeCondition(
         null, "h1", "a1", "i1",
-        1407959718L, 1407959918L, null, false);
+        1407959718L, 1407959918L, null, null, false);

     preparedClause = condition.getConditionClause();
     expectedClause = " HOSTNAME = ? AND " +
@@ -100,7 +111,7 @@ public class TestPhoenixTransactSQL {

     condition = new LikeCondition(
         Arrays.asList("cpu_user"), "h1", "a1", "i1",
-        1407959718L, 1407959918L, null, false);
+        1407959718L, 1407959918L, null, null, false);

     preparedClause = condition.getConditionClause();
     expectedClause = "(METRIC_NAME LIKE ?) AND HOSTNAME = ? AND " +
@@ -112,7 +123,7 @@ public class TestPhoenixTransactSQL {

     condition = new LikeCondition(
         Arrays.asList("cpu_user", "mem_free", "cpu_aidle"), "h1", "a1", "i1",
-        1407959718L, 1407959918L, null, false);
+        1407959718L, 1407959918L, null, null, false);

     preparedClause = condition.getConditionClause();
     expectedClause = "(METRIC_NAME LIKE ? OR METRIC_NAME LIKE ? OR METRIC_NAME LIKE ?) AND HOSTNAME = ? AND " +
@@ -121,4 +132,112 @@ public class TestPhoenixTransactSQL {
     Assert.assertNotNull(preparedClause);
     Assert.assertEquals(expectedClause, preparedClause);
   }
+
+  @Test
+  public void testPrepareGetAggregatePrecisionMINUTES() throws SQLException {
+    Condition condition = new DefaultCondition(
+        Arrays.asList("cpu_user", "mem_free"), "h1", "a1", "i1",
+        1407959718L, 1407959918L, Precision.MINUTES, null, false);
+    Connection connection = createNiceMock(Connection.class);
+    PreparedStatement preparedStatement = createNiceMock(PreparedStatement.class);
+    Capture<String> stmtCapture = new Capture<String>();
+    expect(connection.prepareStatement(EasyMock.and(EasyMock.anyString(), EasyMock.capture(stmtCapture))))
+        .andReturn(preparedStatement);
+
+    replay(connection, preparedStatement);
+    PhoenixTransactSQL.prepareGetAggregateSqlStmt(connection, condition);
+    String stmt = stmtCapture.getValue();
+    Assert.assertTrue(stmt.contains("FROM METRIC_AGGREGATE"));
+    verify(connection, preparedStatement);
+  }
+
+  @Test
+  public void testPrepareGetAggregateNoPrecision() throws SQLException {
+    Condition condition = new DefaultCondition(
+        Arrays.asList("cpu_user", "mem_free"), "h1", "a1", "i1",
+        1407959718L, 1407959918L, null, null, false);
+    Connection connection = createNiceMock(Connection.class);
+    PreparedStatement preparedStatement = createNiceMock(PreparedStatement.class);
+    Capture<String> stmtCapture = new Capture<String>();
+    expect(connection.prepareStatement(EasyMock.and(EasyMock.anyString(), EasyMock.capture(stmtCapture))))
+        .andReturn(preparedStatement);
+
+    replay(connection, preparedStatement);
+    PhoenixTransactSQL.prepareGetAggregateSqlStmt(connection, condition);
+    String stmt = stmtCapture.getValue();
+    Assert.assertTrue(stmt.contains("FROM METRIC_AGGREGATE"));
+    verify(connection, preparedStatement);
+  }
+
+  @Test
+  public void testPrepareGetAggregatePrecisionHours() throws SQLException {
+    Condition condition = new DefaultCondition(
+        Arrays.asList("cpu_user", "mem_free"), "h1", "a1", "i1",
+        1407959718L, 1407959918L, Precision.HOURS, null, false);
+    Connection connection = createNiceMock(Connection.class);
+    PreparedStatement preparedStatement = createNiceMock(PreparedStatement.class);
+    Capture<String> stmtCapture = new Capture<String>();
+    expect(connection.prepareStatement(EasyMock.and(EasyMock.anyString(), EasyMock.capture(stmtCapture))))
+        .andReturn(preparedStatement);
+
+    replay(connection, preparedStatement);
+    PhoenixTransactSQL.prepareGetAggregateSqlStmt(connection, condition);
+    String stmt = stmtCapture.getValue();
+    Assert.assertTrue(stmt.contains("FROM METRIC_AGGREGATE_HOURLY"));
+    verify(connection, preparedStatement);
+  }
+
+  @Test
+  public void testPrepareGetMetricsPrecisionMinutes() throws SQLException {
+    Condition condition = new DefaultCondition(
+        Arrays.asList("cpu_user", "mem_free"), "h1", "a1", "i1",
+        1407959718L, 1407959918L, Precision.MINUTES, null, false);
+    Connection connection = createNiceMock(Connection.class);
+    PreparedStatement preparedStatement = createNiceMock(PreparedStatement.class);
+    Capture<String> stmtCapture = new Capture<String>();
+    expect(connection.prepareStatement(EasyMock.and(EasyMock.anyString(), EasyMock.capture(stmtCapture))))
+        .andReturn(preparedStatement);
+
+    replay(connection, preparedStatement);
+    PhoenixTransactSQL.prepareGetMetricsSqlStmt(connection, condition);
+    String stmt = stmtCapture.getValue();
+    Assert.assertTrue(stmt.contains("FROM METRIC_RECORD_MINUTE"));
+    verify(connection, preparedStatement);
+  }
+
+  @Test
+  public void testPrepareGetMetricsNoPrecision() throws SQLException {
+    Condition condition = new DefaultCondition(
+        Arrays.asList("cpu_user", "mem_free"), "h1", "a1", "i1",
+        1407959718L, 1407959918L, null, null, false);
+    Connection connection = createNiceMock(Connection.class);
+    PreparedStatement preparedStatement = createNiceMock(PreparedStatement.class);
+    Capture<String> stmtCapture = new Capture<String>();
+    expect(connection.prepareStatement(EasyMock.and(EasyMock.anyString(), EasyMock.capture(stmtCapture))))
+        .andReturn(preparedStatement);
+
+    replay(connection, preparedStatement);
+    PhoenixTransactSQL.prepareGetMetricsSqlStmt(connection, condition);
+    String stmt = stmtCapture.getValue();
+    Assert.assertTrue(stmt.contains("FROM METRIC_RECORD"));
+    verify(connection, preparedStatement);
+  }
+
+  @Test
+  public void testPrepareGetMetricsPrecisionHours() throws SQLException {
+    Condition condition = new DefaultCondition(
+        Arrays.asList("cpu_user", "mem_free"), "h1", "a1", "i1",
+        1407959718L, 1407959918L, Precision.HOURS, null, false);
+    Connection connection = createNiceMock(Connection.class);
+    PreparedStatement preparedStatement = createNiceMock(PreparedStatement.class);
+    Capture<String> stmtCapture = new Capture<String>();
+    expect(connection.prepareStatement(EasyMock.and(EasyMock.anyString(), EasyMock.capture(stmtCapture))))
+        .andReturn(preparedStatement);
+
+    replay(connection, preparedStatement);
+    PhoenixTransactSQL.prepareGetMetricsSqlStmt(connection, condition);
+    String stmt = stmtCapture.getValue();
+    Assert.assertTrue(stmt.contains("FROM METRIC_RECORD_HOURLY"));
+    verify(connection, preparedStatement);
+  }
 }
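
Taken together with the integration tests above, these unit tests pin down how the optional precision routes a query to a Phoenix table (each row summarizes an assertion made above; SECONDS behaves like the default):

    precision   prepareGetMetricsSqlStmt    prepareGetAggregateSqlStmt
    (none)      METRIC_RECORD               METRIC_AGGREGATE
    MINUTES     METRIC_RECORD_MINUTE        METRIC_AGGREGATE
    HOURS       METRIC_RECORD_HOURLY        METRIC_AGGREGATE_HOURLY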

+ 2 - 2
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java

@@ -30,7 +30,7 @@ public class TestTimelineMetricStore implements TimelineMetricStore {
   @Override
   public TimelineMetrics getTimelineMetrics(List<String> metricNames,
       String hostname, String applicationId, String instanceId, Long startTime,
-      Long endTime, Integer limit, boolean groupedByHost) throws SQLException,
+      Long endTime, Precision precision, Integer limit, boolean groupedByHost) throws SQLException,
     IOException {
     TimelineMetrics timelineMetrics = new TimelineMetrics();
     List<TimelineMetric> metricList = new ArrayList<TimelineMetric>();
@@ -67,7 +67,7 @@ public class TestTimelineMetricStore implements TimelineMetricStore {
   @Override
   public TimelineMetric getTimelineMetric(String metricName, String hostname,
       String applicationId, String instanceId, Long startTime, Long endTime,
-      Integer limit) throws SQLException, IOException {
+      Precision precision, Integer limit) throws SQLException, IOException {

     return null;
   }

+ 1 - 1
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java

@@ -382,7 +382,7 @@ public class TestTimelineWebServices extends JerseyTest {
   public void testGetMetrics() throws Exception {
     WebResource r = resource();
     ClientResponse response = r.path("ws").path("v1").path("timeline")
-      .path("metrics").queryParam("metricNames", "cpu_user")
+      .path("metrics").queryParam("metricNames", "cpu_user").queryParam("precision", "seconds")
       .accept(MediaType.APPLICATION_JSON)
       .get(ClientResponse.class);
     assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
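
For reference, the request this test issues corresponds to a plain HTTP GET against the collector (host and port illustrative; 6188 is the collector default introduced elsewhere in this commit):

    GET http://<collector-host>:6188/ws/v1/timeline/metrics?metricNames=cpu_user&precision=seconds

The precision values exercised across these tests match the new Precision enum constants: SECONDS, MINUTES, and HOURS, passed in lowercase as the query parameter.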

+ 3 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java

@@ -95,6 +95,8 @@ public abstract class AbstractProviderModule implements ProviderModule,
   private static final Map<Service.Type, Map<String, String[]>> serviceDesiredProperties = new EnumMap<Service.Type, Map<String, String[]>>(Service.Type.class);
   private static final Map<String, Service.Type> componentServiceMap = new HashMap<String, Service.Type>();

+  private static final String COLLECTOR_DEFAULT_PORT = "6188";
+
   private static final Map<String, Map<String, String[]>> jmxDesiredProperties = new HashMap<String, Map<String, String[]>>();
   private volatile Map<String, String> clusterCoreSiteConfigVersionMap = new HashMap<String, String>();
   private volatile Map<String, String> clusterJmxProtocolMap = new HashMap<String, String>();
@@ -351,7 +353,7 @@ public abstract class AbstractProviderModule implements ProviderModule,
           if (!configProperties.isEmpty()) {
             clusterMetricServerPort = getPortString(configProperties.get("METRIC_COLLECTOR"));
           } else {
-            clusterMetricServerPort = "8188";
+            clusterMetricServerPort = COLLECTOR_DEFAULT_PORT;
           }
         }


+ 2 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java

@@ -55,6 +55,7 @@ public abstract class AMSPropertyProvider extends MetricsPropertyProvider {
   private static ObjectMapper mapper;
   private final static ObjectReader timelineObjectReader;
   private static final String METRIC_REGEXP_PATTERN = "\\([^)]*\\)";
+  private static final int COLLECTOR_DEFAULT_PORT = 6188;

   static {
     TIMELINE_APPID_MAP.put("HBASE_MASTER", "HBASE");
@@ -390,7 +391,7 @@ public abstract class AMSPropertyProvider extends MetricsPropertyProvider {
             if (metricsRequest == null) {
               metricsRequest = new MetricsRequest(temporalInfo,
                 getAMSUriBuilder(collectorHostName,
-                  collectorPort != null ? Integer.parseInt(collectorPort) : 8188));
+                  collectorPort != null ? Integer.parseInt(collectorPort) : COLLECTOR_DEFAULT_PORT));
               requests.put(temporalInfo, metricsRequest);
             }
             metricsRequest.putResource(getHostName(resource), resource);