|
@@ -31,6 +31,8 @@ import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.
|
|
|
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
|
|
|
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultCondition;
|
|
|
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL;
|
|
|
+import org.apache.log4j.Level;
|
|
|
+import org.apache.log4j.Logger;
|
|
|
import org.junit.After;
|
|
|
import org.junit.Before;
|
|
|
import org.junit.Test;
|
|
@@ -64,6 +66,7 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
|
|
|
|
|
|
@Before
|
|
|
public void setUp() throws Exception {
|
|
|
+ Logger.getLogger("org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline").setLevel(Level.DEBUG);
|
|
|
hdb = createTestableHBaseAccessor();
|
|
|
// inits connection, starts mini cluster
|
|
|
conn = getConnection(getUrl());
|
|
@@ -87,11 +90,17 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
|
|
|
conn.close();
|
|
|
}
|
|
|
|
|
|
+ private Configuration getConfigurationForTest(boolean useGroupByAggregators) {
|
|
|
+ Configuration configuration = new Configuration();
|
|
|
+ configuration.set("timeline.metrics.service.use.groupBy.aggregators", String.valueOf(useGroupByAggregators));
|
|
|
+ return configuration;
|
|
|
+ }
|
|
|
+
|
|
|
@Test
|
|
|
public void testShouldAggregateClusterProperly() throws Exception {
|
|
|
// GIVEN
|
|
|
TimelineMetricAggregator agg =
|
|
|
- TimelineMetricAggregatorFactory.createTimelineClusterAggregatorMinute(hdb, new Configuration());
|
|
|
+ TimelineMetricAggregatorFactory.createTimelineClusterAggregatorMinute(hdb, getConfigurationForTest(false));
|
|
|
TimelineMetricReadHelper readHelper = new TimelineMetricReadHelper(false);
|
|
|
|
|
|
long startTime = System.currentTimeMillis();
|
|
@@ -143,7 +152,7 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
|
|
|
public void testShouldAggregateClusterIgnoringInstance() throws Exception {
|
|
|
// GIVEN
|
|
|
TimelineMetricAggregator agg =
|
|
|
- TimelineMetricAggregatorFactory.createTimelineClusterAggregatorMinute(hdb, new Configuration());
|
|
|
+ TimelineMetricAggregatorFactory.createTimelineClusterAggregatorMinute(hdb, getConfigurationForTest(false));
|
|
|
TimelineMetricReadHelper readHelper = new TimelineMetricReadHelper(false);
|
|
|
|
|
|
long startTime = System.currentTimeMillis();
|
|
@@ -218,7 +227,7 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
|
|
|
public void testShouldAggregateDifferentMetricsOnClusterProperly() throws Exception {
|
|
|
// GIVEN
|
|
|
TimelineMetricAggregator agg =
|
|
|
- TimelineMetricAggregatorFactory.createTimelineClusterAggregatorMinute(hdb, new Configuration());
|
|
|
+ TimelineMetricAggregatorFactory.createTimelineClusterAggregatorMinute(hdb, getConfigurationForTest(false));
|
|
|
TimelineMetricReadHelper readHelper = new TimelineMetricReadHelper(false);
|
|
|
|
|
|
// here we put some metrics tha will be aggregated
|
|
@@ -282,7 +291,7 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
|
|
|
public void testAggregateDailyClusterMetrics() throws Exception {
|
|
|
// GIVEN
|
|
|
TimelineMetricAggregator agg =
|
|
|
- TimelineMetricAggregatorFactory.createTimelineClusterAggregatorDaily(hdb, new Configuration());
|
|
|
+ TimelineMetricAggregatorFactory.createTimelineClusterAggregatorDaily(hdb, getConfigurationForTest(false));
|
|
|
|
|
|
// this time can be virtualized! or made independent from real clock
|
|
|
long startTime = System.currentTimeMillis();
|
|
@@ -327,7 +336,7 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
|
|
|
public void testShouldAggregateClusterOnHourProperly() throws Exception {
|
|
|
// GIVEN
|
|
|
TimelineMetricAggregator agg =
|
|
|
- TimelineMetricAggregatorFactory.createTimelineClusterAggregatorHourly(hdb, new Configuration());
|
|
|
+ TimelineMetricAggregatorFactory.createTimelineClusterAggregatorHourly(hdb, getConfigurationForTest(false));
|
|
|
|
|
|
// this time can be virtualized! or made independent from real clock
|
|
|
long startTime = System.currentTimeMillis();
|
|
@@ -371,7 +380,7 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
|
|
|
public void testShouldAggregateDifferentMetricsOnHourProperly() throws Exception {
|
|
|
// GIVEN
|
|
|
TimelineMetricAggregator agg =
|
|
|
- TimelineMetricAggregatorFactory.createTimelineClusterAggregatorHourly(hdb, new Configuration());
|
|
|
+ TimelineMetricAggregatorFactory.createTimelineClusterAggregatorHourly(hdb, getConfigurationForTest(false));
|
|
|
|
|
|
long startTime = System.currentTimeMillis();
|
|
|
long ctime = startTime;
|
|
@@ -431,7 +440,7 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
|
|
|
|
|
|
@Test
|
|
|
public void testAppLevelHostMetricAggregates() throws Exception {
|
|
|
- Configuration conf = new Configuration();
|
|
|
+ Configuration conf = getConfigurationForTest(false);
|
|
|
conf.set(CLUSTER_AGGREGATOR_APP_IDS, "app1");
|
|
|
TimelineMetricAggregator agg =
|
|
|
TimelineMetricAggregatorFactory.createTimelineClusterAggregatorMinute(hdb, conf);
|
|
@@ -485,7 +494,7 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
|
|
|
@Test
|
|
|
public void testClusterAggregateMetricNormalization() throws Exception {
|
|
|
TimelineMetricAggregator agg =
|
|
|
- TimelineMetricAggregatorFactory.createTimelineClusterAggregatorMinute(hdb, new Configuration());
|
|
|
+ TimelineMetricAggregatorFactory.createTimelineClusterAggregatorMinute(hdb, getConfigurationForTest(false));
|
|
|
TimelineMetricReadHelper readHelper = new TimelineMetricReadHelper(false);
|
|
|
|
|
|
// Sample data
|
|
@@ -558,6 +567,66 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
|
|
|
Assert.assertEquals(9, recordCount);
|
|
|
}
|
|
|
|
|
|
+ @Test
|
|
|
+ public void testAggregationUsingGroupByQuery() throws Exception {
|
|
|
+ // GIVEN
|
|
|
+ TimelineMetricAggregator agg =
|
|
|
+ TimelineMetricAggregatorFactory.createTimelineClusterAggregatorHourly(hdb, getConfigurationForTest(true));
|
|
|
+
|
|
|
+ long startTime = System.currentTimeMillis();
|
|
|
+ long ctime = startTime;
|
|
|
+ long minute = 60 * 1000;
|
|
|
+
|
|
|
+ Map<TimelineClusterMetric, MetricClusterAggregate> records =
|
|
|
+ new HashMap<TimelineClusterMetric, MetricClusterAggregate>();
|
|
|
+
|
|
|
+ records.put(createEmptyTimelineClusterMetric("disk_used", ctime += minute),
|
|
|
+ new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
|
|
|
+ records.put(createEmptyTimelineClusterMetric("disk_free", ctime),
|
|
|
+ new MetricClusterAggregate(1.0, 2, 0.0, 1.0, 1.0));
|
|
|
+
|
|
|
+ records.put(createEmptyTimelineClusterMetric("disk_used", ctime += minute),
|
|
|
+ new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
|
|
|
+ records.put(createEmptyTimelineClusterMetric("disk_free", ctime),
|
|
|
+ new MetricClusterAggregate(1.0, 2, 0.0, 1.0, 1.0));
|
|
|
+
|
|
|
+ records.put(createEmptyTimelineClusterMetric("disk_used", ctime += minute),
|
|
|
+ new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
|
|
|
+ records.put(createEmptyTimelineClusterMetric("disk_free", ctime),
|
|
|
+ new MetricClusterAggregate(1.0, 2, 0.0, 1.0, 1.0));
|
|
|
+
|
|
|
+ records.put(createEmptyTimelineClusterMetric("disk_used", ctime += minute),
|
|
|
+ new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
|
|
|
+ records.put(createEmptyTimelineClusterMetric("disk_free", ctime),
|
|
|
+ new MetricClusterAggregate(1.0, 2, 0.0, 1.0, 1.0));
|
|
|
+
|
|
|
+ hdb.saveClusterAggregateRecords(records);
|
|
|
+
|
|
|
+ // WHEN
|
|
|
+ agg.doWork(startTime, ctime + minute);
|
|
|
+
|
|
|
+ // THEN
|
|
|
+ ResultSet rs = executeQuery("SELECT * FROM METRIC_AGGREGATE_HOURLY");
|
|
|
+ int count = 0;
|
|
|
+ while (rs.next()) {
|
|
|
+ if ("disk_used".equals(rs.getString("METRIC_NAME"))) {
|
|
|
+ assertEquals("APP_ID", "test_app", rs.getString("APP_ID"));
|
|
|
+ assertEquals("METRIC_SUM", 16.0, rs.getDouble("METRIC_SUM"), 0.0);
|
|
|
+ assertEquals("METRIC_COUNT", 8, rs.getLong("METRIC_COUNT"));
|
|
|
+ assertEquals("METRIC_MAX", 4.0, rs.getDouble("METRIC_MAX"), 0.0);
|
|
|
+ assertEquals("METRIC_MIN", 0.0, rs.getDouble("METRIC_MIN"), 0.0);
|
|
|
+ } else if ("disk_free".equals(rs.getString("METRIC_NAME"))) {
|
|
|
+ assertEquals("APP_ID", "test_app", rs.getString("APP_ID"));
|
|
|
+ assertEquals("METRIC_SUM", 4.0, rs.getDouble("METRIC_SUM"), 0.0);
|
|
|
+ assertEquals("METRIC_COUNT", 8, rs.getLong("METRIC_COUNT"));
|
|
|
+ assertEquals("METRIC_MAX", 1.0, rs.getDouble("METRIC_MAX"), 0.0);
|
|
|
+ assertEquals("METRIC_MIN", 1.0, rs.getDouble("METRIC_MIN"), 0.0);
|
|
|
+ }
|
|
|
+ count++;
|
|
|
+ }
|
|
|
+ assertEquals("Two hourly aggregated rows expected", 2, count);
|
|
|
+ }
|
|
|
+
|
|
|
private ResultSet executeQuery(String query) throws SQLException {
|
|
|
Connection conn = getConnection(getUrl());
|
|
|
Statement stmt = conn.createStatement();
|