
AMBARI-15050 Https Support for Metrics System (dsen)

Author: Dmytro Sen (9 years ago)
Commit: 7e75e52a91
64 changed files with 942 additions and 560 deletions
  1. +86 -6  ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
  2. +1 -2  ambari-metrics/ambari-metrics-flume-sink/src/main/conf/flume-metrics2.properties.j2
  3. +7 -4  ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
  4. +4 -4  ambari-metrics/ambari-metrics-hadoop-sink/src/main/conf/hadoop-metrics2-hbase.properties.j2
  5. +11 -11  ambari-metrics/ambari-metrics-hadoop-sink/src/main/conf/hadoop-metrics2.properties.j2
  6. +9 -4  ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
  7. +3 -3  ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
  8. +1 -0  ambari-metrics/ambari-metrics-host-monitoring/conf/unix/metric_monitor.ini
  9. +5 -1  ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/config_reader.py
  10. +9 -9  ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/emitter.py
  11. +15 -2  ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java
  12. +1 -1  ambari-metrics/ambari-metrics-kafka-sink/src/test/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporterTest.java
  13. +13 -11  ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsReporter.java
  14. +7 -1  ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSink.java
  15. +6 -5  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
  16. +1 -5  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
  17. +4 -2  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
  18. +7 -7  ambari-server/src/main/java/org/apache/ambari/server/configuration/ComponentSSLConfiguration.java
  19. +3 -3  ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
  20. +1 -1  ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaPropertyProvider.java
  21. +1 -1  ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaReportPropertyProvider.java
  22. +4 -4  ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java
  23. +1 -1  ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSReportPropertyProvider.java
  24. +1 -0  ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/metainfo.xml
  25. +7 -0  ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
  26. +6 -1  ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/templates/hadoop-metrics2-accumulo.properties.j2
  27. +11 -1  ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
  28. +37 -0  ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-ssl-client.xml
  29. +64 -0  ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-ssl-server.xml
  30. +2 -0  ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/metainfo.xml
  31. +8 -0  ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams.py
  32. +1 -1  ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py
  33. +10 -0  ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
  34. +13 -4  ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py
  35. +6 -1  ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
  36. +1 -0  ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metric_monitor.ini.j2
  37. +1 -0  ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/metainfo.xml
  38. +7 -0  ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
  39. +6 -2  ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/templates/flume-metrics2.properties.j2
  40. +1 -0  ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml
  41. +1 -0  ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/metainfo.xml
  42. +8 -0  ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
  43. +6 -1  ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
  44. +6 -1  ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
  45. +1 -0  ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml
  46. +21 -0  ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml
  47. +1 -0  ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/metainfo.xml
  48. +4 -0  ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka.py
  49. +9 -1  ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
  50. +1 -0  ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/metainfo.xml
  51. +9 -2  ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
  52. +6 -2  ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/templates/config.yaml.j2
  53. +7 -3  ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/templates/storm-metrics2.properties.j2
  54. +2 -0  ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/metainfo.xml
  55. +8 -0  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
  56. +15 -10  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hadoop-metrics2.properties.j2
  57. +3 -3  ambari-server/src/test/java/org/apache/ambari/server/configuration/ComponentSSLConfigurationTest.java
  58. +13 -13  ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaPropertyProviderTest.java
  59. +1 -1  ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaReportPropertyProviderTest.java
  60. +15 -15  ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProviderTest.java
  61. +2 -2  ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/AMSReportPropertyProviderTest.java
  62. +8 -0  ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
  63. +406 -408  ambari-server/src/test/python/stacks/2.0.6/configs/default.json
  64. +7 -0  ambari-server/src/test/python/stacks/2.0.6/configs/default_ams_embedded.json

+ 86 - 6
ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java

@@ -24,21 +24,35 @@ import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.map.annotate.JsonSerialize;
 import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
 
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLSocketFactory;
+import javax.net.ssl.TrustManagerFactory;
+import java.io.File;
+import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.security.KeyStore;
 
 public abstract class AbstractTimelineMetricsSink {
   public static final String TAGS_FOR_PREFIX_PROPERTY_PREFIX = "tagsForPrefix.";
   public static final String MAX_METRIC_ROW_CACHE_SIZE = "maxRowCacheSize";
   public static final String METRICS_SEND_INTERVAL = "sendInterval";
   public static final String METRICS_POST_TIMEOUT_SECONDS = "timeout";
-  public static final String COLLECTOR_HOST_PROPERTY = "collector";
-  public static final String COLLECTOR_PORT_PROPERTY = "port";
+  public static final String COLLECTOR_PROPERTY = "collector";
   public static final int DEFAULT_POST_TIMEOUT_SECONDS = 10;
   public static final String SKIP_COUNTER_TRANSFROMATION = "skipCounterDerivative";
 
+  public static final String WS_V1_TIMELINE_METRICS = "/ws/v1/timeline/metrics";
+
+  public static final String SSL_KEYSTORE_PATH_PROPERTY = "truststore.path";
+  public static final String SSL_KEYSTORE_TYPE_PROPERTY = "truststore.type";
+  public static final String SSL_KEYSTORE_PASSWORD_PROPERTY = "truststore.password";
+
+  private SSLSocketFactory sslSocketFactory;
+
   protected final Log LOG;
 
   protected static ObjectMapper mapper;
@@ -48,7 +62,7 @@ public abstract class AbstractTimelineMetricsSink {
     AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
     mapper.setAnnotationIntrospector(introspector);
     mapper.getSerializationConfig()
-        .setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
+      .withSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
   }
 
   public AbstractTimelineMetricsSink() {
@@ -59,9 +73,13 @@ public abstract class AbstractTimelineMetricsSink {
     String connectUrl = getCollectorUri();
     int timeout = getTimeoutSeconds() * 1000;
     try {
+      if (connectUrl == null) {
+        throw new IOException("Unknown URL. " +
+          "Unable to connect to metrics collector.");
+      }
       String jsonData = mapper.writeValueAsString(metrics);
-
-      HttpURLConnection connection = (HttpURLConnection) new URL(connectUrl).openConnection();
+      HttpURLConnection connection = connectUrl.startsWith("https") ?
+        getSSLConnection(connectUrl) : getConnection(connectUrl);
 
       connection.setRequestMethod("POST");
       connection.setRequestProperty("Content-Type", "application/json");
@@ -81,13 +99,75 @@ public abstract class AbstractTimelineMetricsSink {
         LOG.info("Unable to POST metrics to collector, " + connectUrl + ", " +
           "statusCode = " + statusCode);
       } else {
-        LOG.debug("Metrics posted to Collector " + connectUrl);
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("Metrics posted to Collector " + connectUrl);
+        }
       }
     } catch (IOException e) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Unable to connect to collector, " + connectUrl, e);
+      } else {
+        LOG.info("Unable to connect to collector, " + connectUrl);
+      }
       throw new UnableToConnectException(e).setConnectUrl(connectUrl);
     }
   }
 
+  // Get a connection
+  protected HttpURLConnection getConnection(String spec) throws IOException {
+    return (HttpURLConnection) new URL(spec).openConnection();
+  }
+
+  // Get an ssl connection
+  protected HttpsURLConnection getSSLConnection(String spec)
+    throws IOException, IllegalStateException {
+
+    HttpsURLConnection connection = (HttpsURLConnection) (new URL(spec)
+      .openConnection());
+
+    connection.setSSLSocketFactory(sslSocketFactory);
+
+    return connection;
+  }
+
+  protected void loadTruststore(String trustStorePath, String trustStoreType,
+                                String trustStorePassword) {
+    if (sslSocketFactory == null) {
+      if (trustStorePath == null || trustStorePassword == null) {
+
+        String msg =
+          String.format("Can't load TrustStore. " +
+            "Truststore path or password is not set.");
+
+        LOG.error(msg);
+        throw new IllegalStateException(msg);
+      }
+      FileInputStream in = null;
+      try {
+        in = new FileInputStream(new File(trustStorePath));
+        KeyStore store = KeyStore.getInstance(trustStoreType == null ?
+          KeyStore.getDefaultType() : trustStoreType);
+        store.load(in, trustStorePassword.toCharArray());
+        TrustManagerFactory tmf = TrustManagerFactory
+          .getInstance(TrustManagerFactory.getDefaultAlgorithm());
+        tmf.init(store);
+        SSLContext context = SSLContext.getInstance("TLS");
+        context.init(null, tmf.getTrustManagers(), null);
+        sslSocketFactory = context.getSocketFactory();
+      } catch (Exception e) {
+        LOG.error("Unable to load TrustStore", e);
+      } finally {
+        if (in != null) {
+          try {
+            in.close();
+          } catch (IOException e) {
+            LOG.error("Unable to load TrustStore", e);
+          }
+        }
+      }
+    }
+  }
+
   abstract protected String getCollectorUri();
 
   abstract protected int getTimeoutSeconds();
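
For context, here is a minimal sketch (not part of this commit) of how a concrete sink would use the new contract: the "collector" property now carries a full URI including the scheme, and the truststore is loaded only when that URI is HTTPS. The class name ExampleTimelineMetricsSink and its init(Properties) method are hypothetical; the constants and loadTruststore() are the ones added above.

import java.util.Properties;

import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;

public class ExampleTimelineMetricsSink extends AbstractTimelineMetricsSink {
  private String collectorUri;

  public void init(Properties conf) {
    // "collector" now holds a full URI such as https://ams-host:6188
    collectorUri = conf.getProperty(COLLECTOR_PROPERTY) + WS_V1_TIMELINE_METRICS;
    if (collectorUri.toLowerCase().startsWith("https://")) {
      // Trust material is only needed for an HTTPS collector endpoint
      loadTruststore(conf.getProperty(SSL_KEYSTORE_PATH_PROPERTY),
                     conf.getProperty(SSL_KEYSTORE_TYPE_PROPERTY),
                     conf.getProperty(SSL_KEYSTORE_PASSWORD_PROPERTY));
    }
  }

  @Override
  protected String getCollectorUri() {
    return collectorUri;
  }

  @Override
  protected int getTimeoutSeconds() {
    return DEFAULT_POST_TIMEOUT_SECONDS;
  }
}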

+ 1 - 2
ambari-metrics/ambari-metrics-flume-sink/src/main/conf/flume-metrics2.properties.j2

@@ -16,8 +16,7 @@
 # limitations under the License.
 #}
 
-collector={{metric_collector_host}}
-port={{metric_collector_port}}
+collector=http://localhost:6188
 collectionFrequency=60000
 maxRowCacheSize=10000
 sendInterval=59000

+ 7 - 4
ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java

@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.metrics2.sink.flume;
 
-import org.apache.commons.lang.ClassUtils;
 import org.apache.commons.lang.math.NumberUtils;
 import org.apache.flume.Context;
 import org.apache.flume.FlumeException;
@@ -96,9 +95,13 @@ public class FlumeTimelineMetricsSink extends AbstractTimelineMetricsSink implem
     metricsSendInterval = Integer.parseInt(configuration.getProperty(METRICS_SEND_INTERVAL,
         String.valueOf(TimelineMetricsCache.MAX_EVICTION_TIME_MILLIS)));
     metricsCaches = new HashMap<String, TimelineMetricsCache>();
-    String collectorHostname = configuration.getProperty(COLLECTOR_HOST_PROPERTY);
-    String port = configuration.getProperty(COLLECTOR_PORT_PROPERTY);
-    collectorUri = "http://" + collectorHostname + ":" + port + "/ws/v1/timeline/metrics";
+    collectorUri = configuration.getProperty(COLLECTOR_PROPERTY) + WS_V1_TIMELINE_METRICS;
+    if (collectorUri.toLowerCase().startsWith("https://")) {
+      String trustStorePath = configuration.getProperty(SSL_KEYSTORE_PATH_PROPERTY).trim();
+      String trustStoreType = configuration.getProperty(SSL_KEYSTORE_TYPE_PROPERTY).trim();
+      String trustStorePwd = configuration.getProperty(SSL_KEYSTORE_PASSWORD_PROPERTY).trim();
+      loadTruststore(trustStorePath, trustStoreType, trustStorePwd);
+    }
     pollFrequency = Long.parseLong(configuration.getProperty("collectionFrequency"));
 
     String[] metrics = configuration.getProperty(COUNTER_METRICS_PROPERTY).trim().split(",");

+ 4 - 4
ambari-metrics/ambari-metrics-hadoop-sink/src/main/conf/hadoop-metrics2-hbase.properties.j2

@@ -31,19 +31,19 @@ hbase.extendedperiod = 3600
 # Configuration of the "hbase" context for timeline metrics service
 hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 hbase.period=10
-hbase.collector={{timeline_server_hosts}}:8188
+hbase.collector={{timeline_server_hosts}}:6188
 
 # Configuration of the "jvm" context for timeline metrics service
 jvm.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 jvm.period=10
-jvm.collector={{timeline_server_hosts}}:8188
+jvm.collector={{timeline_server_hosts}}:6188
 
 # Configuration of the "rpc" context for timeline metrics service
 rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 rpc.period=10
-rpc.collector={{timeline_server_hosts}}:8188
+rpc.collector={{timeline_server_hosts}}:6188
 
 # Following hadoop example
 hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 hbase.sink.timeline.period=10
-hbase.sink.timeline.collector={{timeline_server_hosts}}:8188
+hbase.sink.timeline.collector=http://{{timeline_server_hosts}}:6188

+ 11 - 11
ambari-metrics/ambari-metrics-hadoop-sink/src/main/conf/hadoop-metrics2.properties.j2

@@ -42,17 +42,17 @@
 
 
 # Hook up to the server
-datanode.sink.timeline.collector={{timeline_server_hosts}}:8188
-namenode.sink.timeline.collector={{timeline_server_hosts}}:8188
-resourcemanager.sink.timeline.collector={{timeline_server_hosts}}:8188
-nodemanager.sink.timeline.collector={{timeline_server_hosts}}:8188
-historyserver.sink.timeline.collector={{timeline_server_hosts}}:8188
-journalnode.sink.timeline.collector={{timeline_server_hosts}}:8188
-nimbus.sink.timeline.collector={{timeline_server_hosts}}:8188
-supervisor.sink.timeline.collector={{timeline_server_hosts}}:8188
-maptask.sink.timeline.collector={{timeline_server_hosts}}:8188
-reducetask.sink.timeline.collector={{timeline_server_hosts}}:8188
+datanode.sink.timeline.collector=http://localhost:6188
+namenode.sink.timeline.collector=http://localhost:6188
+resourcemanager.sink.timeline.collector=http://localhost:6188
+nodemanager.sink.timeline.collector=http://localhost:6188
+historyserver.sink.timeline.collector=http://localhost:6188
+journalnode.sink.timeline.collector=http://localhost:6188
+nimbus.sink.timeline.collector=http://localhost:6188
+supervisor.sink.timeline.collector=http://localhost:6188
+maptask.sink.timeline.collector=http://localhost:6188
+reducetask.sink.timeline.collector=http://localhost:6188
 
 resourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue
 
-{% endif %}
+{% endif %}

+ 9 - 4
ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java

@@ -29,7 +29,6 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.commons.configuration.SubsetConfiguration;
-import org.apache.commons.lang.ClassUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -79,13 +78,19 @@ public class HadoopTimelineMetricsSink extends AbstractTimelineMetricsSink imple
     LOG.info("Identified hostname = " + hostName + ", serviceName = " + serviceName);
 
     // Load collector configs
-    metricsServers = Servers.parse(conf.getString(COLLECTOR_HOST_PROPERTY), 6188);
+    metricsServers = Servers.parse(conf.getString(COLLECTOR_PROPERTY), 6188);
 
     if (metricsServers == null || metricsServers.isEmpty()) {
       LOG.error("No Metric collector configured.");
     } else {
-      collectorUri = "http://" + conf.getString(COLLECTOR_HOST_PROPERTY).trim()
-          + "/ws/v1/timeline/metrics";
+      collectorUri = conf.getString(COLLECTOR_PROPERTY).trim()
+          + WS_V1_TIMELINE_METRICS;
+      if (collectorUri.toLowerCase().startsWith("https://")) {
+        String trustStorePath = conf.getString(SSL_KEYSTORE_PATH_PROPERTY).trim();
+        String trustStoreType = conf.getString(SSL_KEYSTORE_TYPE_PROPERTY).trim();
+        String trustStorePwd = conf.getString(SSL_KEYSTORE_PASSWORD_PROPERTY).trim();
+        loadTruststore(trustStorePath, trustStoreType, trustStorePwd);
+      }
     }
 
     LOG.info("Collector Uri: " + collectorUri);

+ 3 - 3
ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java

@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.metrics2.sink.timeline;
 
-import static org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink.COLLECTOR_HOST_PROPERTY;
+import static org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink.COLLECTOR_PROPERTY;
 import static org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink.MAX_METRIC_ROW_CACHE_SIZE;
 import static org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink.METRICS_SEND_INTERVAL;
 import static org.easymock.EasyMock.anyInt;
@@ -62,7 +62,7 @@ public class HadoopTimelineMetricsSinkTest {
     expect(conf.getString(eq("slave.host.name"))).andReturn("testhost").anyTimes();
     expect(conf.getParent()).andReturn(null).anyTimes();
     expect(conf.getPrefix()).andReturn("service").anyTimes();
-    expect(conf.getString(eq(COLLECTOR_HOST_PROPERTY))).andReturn("localhost:63188").anyTimes();
+    expect(conf.getString(eq(COLLECTOR_PROPERTY))).andReturn("localhost:63188").anyTimes();
     expect(conf.getString(eq("serviceName-prefix"), eq(""))).andReturn("").anyTimes();
 
     expect(conf.getInt(eq(MAX_METRIC_ROW_CACHE_SIZE), anyInt())).andReturn(10).anyTimes();
@@ -130,7 +130,7 @@ public class HadoopTimelineMetricsSinkTest {
     expect(conf.getString(eq("slave.host.name"))).andReturn("testhost").anyTimes();
     expect(conf.getParent()).andReturn(null).anyTimes();
     expect(conf.getPrefix()).andReturn("service").anyTimes();
-    expect(conf.getString(eq(COLLECTOR_HOST_PROPERTY))).andReturn("localhost:63188").anyTimes();
+    expect(conf.getString(eq(COLLECTOR_PROPERTY))).andReturn("localhost:63188").anyTimes();
     expect(conf.getString(eq("serviceName-prefix"), eq(""))).andReturn("").anyTimes();
 
     expect(conf.getInt(eq(MAX_METRIC_ROW_CACHE_SIZE), anyInt())).andReturn(10).anyTimes();

+ 1 - 0
ambari-metrics/ambari-metrics-host-monitoring/conf/unix/metric_monitor.ini

@@ -22,6 +22,7 @@ metrics_server = localhost:{{ams_collector_port}}
 hostname = {{hostname}}
 enable_time_threshold = false
 enable_value_threshold = false
+https_enabled = false
 
 [emitter]
 send_interval = 60

+ 5 - 1
ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/config_reader.py

@@ -91,6 +91,7 @@ debug_level = INFO
 metrics_server = host:port
 enable_time_threshold = false
 enable_value_threshold = false
+https_enabled = false
 
 [emitter]
 send_interval = 60
@@ -181,7 +182,7 @@ class Configuration:
 
   def get(self, section, key, default=None):
     try:
-      value = self.config.get(section, key)
+      value = str(self.config.get(section, key)).strip()
     except:
       return default
     return value
@@ -209,3 +210,6 @@ class Configuration:
 
   def get_max_queue_size(self):
     return int(self.get("collector", "max_queue_size", 5000))
+
+  def get_server_https_enabled(self):
+    return "true" == str(self.get("default", "https_enabled")).lower()

+ 9 - 9
ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/emitter.py

@@ -20,13 +20,12 @@ limitations under the License.
 
 import logging
 import threading
-import time
 import urllib2
 
 logger = logging.getLogger()
 
 class Emitter(threading.Thread):
-  COLLECTOR_URL = "http://{0}/ws/v1/timeline/metrics"
+  COLLECTOR_URL = "{0}://{1}/ws/v1/timeline/metrics"
   RETRY_SLEEP_INTERVAL = 5
   MAX_RETRY_COUNT = 3
   """
@@ -36,10 +35,12 @@ class Emitter(threading.Thread):
     threading.Thread.__init__(self)
     logger.debug('Initializing Emitter thread.')
     self.lock = threading.Lock()
-    self.collector_address = config.get_server_address()
     self.send_interval = config.get_send_interval()
     self._stop_handler = stop_handler
     self.application_metric_map = application_metric_map
+    # TODO verify certificate
+    protocol = 'https' if config.get_server_https_enabled() else 'http'
+    self.collector_url = self.COLLECTOR_URL.format(protocol, config.get_server_address())
 
   def run(self):
     logger.info('Running Emitter thread: %s' % threading.currentThread().getName())
@@ -54,7 +55,7 @@ class Emitter(threading.Thread):
         logger.info('Shutting down Emitter thread')
         return
     pass
-  
+
   def submit_metrics(self):
     retry_count = 0
     # This call will acquire lock on the map and clear contents before returning
@@ -73,7 +74,7 @@ class Emitter(threading.Thread):
       except Exception, e:
         logger.warn('Error sending metrics to server. %s' % str(e))
       pass
-  
+
       if response and response.getcode() == 200:
         retry_count = self.MAX_RETRY_COUNT
       else:
@@ -84,13 +85,12 @@ class Emitter(threading.Thread):
           return
       pass
     pass
-  
+    # TODO verify certificate
   def push_metrics(self, data):
     headers = {"Content-Type" : "application/json", "Accept" : "*/*"}
-    server = self.COLLECTOR_URL.format(self.collector_address.strip())
-    logger.info("server: %s" % server)
+    logger.info("server: %s" % self.collector_url)
     logger.debug("message to sent: %s" % data)
-    req = urllib2.Request(server, data, headers)
+    req = urllib2.Request(self.collector_url, data, headers)
     response = urllib2.urlopen(req, timeout=int(self.send_interval - 10))
     if response:
       logger.debug("POST response from server: retcode = {0}".format(response.getcode()))

+ 15 - 2
ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java

@@ -63,11 +63,13 @@ public class KafkaTimelineMetricsReporter extends AbstractTimelineMetricsSink
   private static final String TIMELINE_METRICS_MAX_ROW_CACHE_SIZE_PROPERTY = "kafka.timeline.metrics.maxRowCacheSize";
   private static final String TIMELINE_HOST_PROPERTY = "kafka.timeline.metrics.host";
   private static final String TIMELINE_PORT_PROPERTY = "kafka.timeline.metrics.port";
+  private static final String TIMELINE_PROTOCOL_PROPERTY = "kafka.timeline.metrics.protocol";
   private static final String TIMELINE_REPORTER_ENABLED_PROPERTY = "kafka.timeline.metrics.reporter.enabled";
   private static final String EXCLUDED_METRICS_PROPERTY = "external.kafka.metrics.exclude.prefix";
   private static final String INCLUDED_METRICS_PROPERTY = "external.kafka.metrics.include.prefix";
   private static final String TIMELINE_DEFAULT_HOST = "localhost";
-  private static final String TIMELINE_DEFAULT_PORT = "8188";
+  private static final String TIMELINE_DEFAULT_PORT = "6188";
+  private static final String TIMELINE_DEFAULT_PROTOCOL = "http";
 
   private boolean initialized = false;
   private boolean running = false;
@@ -117,8 +119,19 @@ public class KafkaTimelineMetricsReporter extends AbstractTimelineMetricsSink
         int maxRowCacheSize = props.getInt(TIMELINE_METRICS_MAX_ROW_CACHE_SIZE_PROPERTY, MAX_RECS_PER_NAME_DEFAULT);
         String metricCollectorHost = props.getString(TIMELINE_HOST_PROPERTY, TIMELINE_DEFAULT_HOST);
         String metricCollectorPort = props.getString(TIMELINE_PORT_PROPERTY, TIMELINE_DEFAULT_PORT);
+        String metricCollectorProtocol = props.getString(TIMELINE_PROTOCOL_PROPERTY, TIMELINE_DEFAULT_PROTOCOL);
         setMetricsCache(new TimelineMetricsCache(maxRowCacheSize, metricsSendInterval));
-        collectorUri = "http://" + metricCollectorHost + ":" + metricCollectorPort + "/ws/v1/timeline/metrics";
+
+        collectorUri = metricCollectorProtocol + "://" + metricCollectorHost +
+                       ":" + metricCollectorPort + WS_V1_TIMELINE_METRICS;
+
+        if (collectorUri.toLowerCase().startsWith("https://")) {
+          String trustStorePath = props.getString(SSL_KEYSTORE_PATH_PROPERTY).trim();
+          String trustStoreType = props.getString(SSL_KEYSTORE_TYPE_PROPERTY).trim();
+          String trustStorePwd = props.getString(SSL_KEYSTORE_PASSWORD_PROPERTY).trim();
+          loadTruststore(trustStorePath, trustStoreType, trustStorePwd);
+        }
+
 
         // Exclusion policy
         String excludedMetricsStr = props.getString(EXCLUDED_METRICS_PROPERTY, "");
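
For reference, an illustrative (not from this commit) view of the broker-side properties the reporter now reads, expressed as plain java.util.Properties. The class name and host value are hypothetical; the truststore keys are the AbstractTimelineMetricsSink constants ("truststore.path", "truststore.type", "truststore.password"), and the example values mirror the ams-ssl-client defaults introduced later in this diff.

import java.util.Properties;

public class KafkaReporterConfigExample {
  public static Properties example() {
    Properties props = new Properties();
    // New in this commit: explicit protocol plus the 6188 default port
    props.setProperty("kafka.timeline.metrics.protocol", "https");
    props.setProperty("kafka.timeline.metrics.host", "ams-host.example.com");
    props.setProperty("kafka.timeline.metrics.port", "6188");
    props.setProperty("kafka.timeline.metrics.reporter.enabled", "true");
    // Read via the SSL_KEYSTORE_*_PROPERTY constants when the collector URI is https://
    props.setProperty("truststore.path", "/etc/security/clientKeys/all.jks");
    props.setProperty("truststore.type", "jks");
    props.setProperty("truststore.password", "bigdata");
    return props;
  }
}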

+ 1 - 1
ambari-metrics/ambari-metrics-kafka-sink/src/test/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporterTest.java

@@ -79,7 +79,7 @@ public class KafkaTimelineMetricsReporterTest {
     properties.setProperty("kafka.timeline.metrics.sendInterval", "5900");
     properties.setProperty("kafka.timeline.metrics.maxRowCacheSize", "10000");
     properties.setProperty("kafka.timeline.metrics.host", "localhost");
-    properties.setProperty("kafka.timeline.metrics.port", "8188");
+    properties.setProperty("kafka.timeline.metrics.port", "6188");
     properties.setProperty("kafka.timeline.metrics.reporter.enabled", "true");
     properties.setProperty("external.kafka.metrics.exclude.prefix", "a.b.c");
     properties.setProperty("external.kafka.metrics.include.prefix", "a.b.c.d");

+ 13 - 11
ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsReporter.java

@@ -24,7 +24,6 @@ import backtype.storm.generated.TopologySummary;
 import backtype.storm.metric.IClusterReporter;
 import backtype.storm.utils.NimbusClient;
 import backtype.storm.utils.Utils;
-import org.apache.commons.lang3.ClassUtils;
 import org.apache.commons.lang3.Validate;
 import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
@@ -40,9 +39,7 @@ import java.util.Map;
 public class StormTimelineMetricsReporter extends AbstractTimelineMetricsSink
   implements IClusterReporter {
 
-  public static final String COLLECTOR_HOST = "host";
-  public static final String COLLECTOR_PORT = "port";
-  public static final String METRICS_COLLECTOR = "metrics_collector";
+  public static final String METRICS_COLLECTOR_CATEGORY = "metrics_collector";
   public static final String APP_ID = "appId";
 
   private String hostname;
@@ -79,20 +76,25 @@ public class StormTimelineMetricsReporter extends AbstractTimelineMetricsSink
         LOG.error("Could not identify hostname.");
         throw new RuntimeException("Could not identify hostname.", e);
       }
-      Validate.notNull(conf.get(METRICS_COLLECTOR), METRICS_COLLECTOR + " can not be null");
-      Map cf = (Map) conf.get(METRICS_COLLECTOR);
+      Validate.notNull(conf.get(METRICS_COLLECTOR_CATEGORY), METRICS_COLLECTOR_CATEGORY + " can not be null");
+      Map cf = (Map) conf.get(METRICS_COLLECTOR_CATEGORY);
       Map stormConf = Utils.readStormConfig();
       this.nimbusClient = NimbusClient.getConfiguredClient(stormConf);
-      String collectorHostname = cf.get(COLLECTOR_HOST).toString();
-      String port = cf.get(COLLECTOR_PORT).toString();
+      String collector = cf.get(COLLECTOR_PROPERTY).toString();
       timeoutSeconds = cf.get(METRICS_POST_TIMEOUT_SECONDS) != null ?
         Integer.parseInt(cf.get(METRICS_POST_TIMEOUT_SECONDS).toString()) :
         DEFAULT_POST_TIMEOUT_SECONDS;
       applicationId = cf.get(APP_ID).toString();
-      collectorUri = "http://" + collectorHostname + ":" + port + "/ws/v1/timeline/metrics";
+      collectorUri = collector + WS_V1_TIMELINE_METRICS;
+      if (collectorUri.toLowerCase().startsWith("https://")) {
+        String trustStorePath = cf.get(SSL_KEYSTORE_PATH_PROPERTY).toString().trim();
+        String trustStoreType = cf.get(SSL_KEYSTORE_TYPE_PROPERTY).toString().trim();
+        String trustStorePwd = cf.get(SSL_KEYSTORE_PASSWORD_PROPERTY).toString().trim();
+        loadTruststore(trustStorePath, trustStoreType, trustStorePwd);
+      }
     } catch (Exception e) {
-      LOG.warn("Could not initialize metrics collector, please specify host, " +
-        "port under $STORM_HOME/conf/config.yaml ", e);
+      LOG.warn("Could not initialize metrics collector, please specify " +
+        "protocol, host, port under $STORM_HOME/conf/config.yaml ", e);
     }
 
   }

+ 7 - 1
ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSink.java

@@ -77,7 +77,13 @@ public class StormTimelineMetricsSink extends AbstractTimelineMetricsSink implem
     int metricsSendInterval = Integer.parseInt(configuration.getProperty(METRICS_SEND_INTERVAL,
         String.valueOf(MAX_EVICTION_TIME_MILLIS)));
     metricsCache = new TimelineMetricsCache(maxRowCacheSize, metricsSendInterval);
-    collectorUri = "http://" + configuration.getProperty(COLLECTOR_HOST_PROPERTY) + ":" + configuration.getProperty(COLLECTOR_PORT_PROPERTY) + "/ws/v1/timeline/metrics";
+    collectorUri = configuration.getProperty(COLLECTOR_PROPERTY) + WS_V1_TIMELINE_METRICS;
+    if (collectorUri.toLowerCase().startsWith("https://")) {
+      String trustStorePath = configuration.getProperty(SSL_KEYSTORE_PATH_PROPERTY).trim();
+      String trustStoreType = configuration.getProperty(SSL_KEYSTORE_TYPE_PROPERTY).trim();
+      String trustStorePwd = configuration.getProperty(SSL_KEYSTORE_PASSWORD_PROPERTY).trim();
+      loadTruststore(trustStorePath, trustStoreType, trustStorePwd);
+    }
   }
 
   @Override

+ 6 - 5
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java

@@ -22,6 +22,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.source.JvmMetrics;
 import org.apache.hadoop.service.CompositeService;
@@ -170,15 +171,15 @@ public class ApplicationHistoryServer extends CompositeService {
     String bindAddress = metricConfiguration.getWebappAddress();
     LOG.info("Instantiating AHSWebApp at " + bindAddress);
     try {
+      Configuration conf = metricConfiguration.getMetricsConf();
+      HttpConfig.Policy policy = HttpConfig.Policy.valueOf(
+        conf.get(TimelineMetricConfiguration.TIMELINE_SERVICE_HTTP_POLICY,
+          HttpConfig.Policy.HTTP_ONLY.name()));
       webApp =
           WebApps
             .$for("applicationhistory", ApplicationHistoryClientService.class,
               ahsClientService, "ws")
-            .with(getConfig())
-            .withHttpSpnegoPrincipalKey(
-              YarnConfiguration.TIMELINE_SERVICE_PRINCIPAL)
-            .withHttpSpnegoKeytabKey(
-              YarnConfiguration.TIMELINE_SERVICE_KEYTAB)
+            .withHttpPolicy(conf, policy)
             .at(bindAddress)
             .start(new AHSWebApp(timelineStore, timelineMetricStore,
               ahsClientService));

+ 1 - 5
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java

@@ -37,7 +37,7 @@ public class RestMetricsSender implements MetricsSender {
 
   /**
    * Creates unconnected RestMetricsSender with endpoint configured as
-   * http://${metricsHost}:8188/ws/v1/timeline/metrics,
+   * http://${metricsHost}:6188/ws/v1/timeline/metrics,
    * where ${metricsHost} is specified by metricHost param.
    *
    * @param metricsHost the hostname that will be used to access application metrics history service.
@@ -70,10 +70,6 @@ public class RestMetricsSender implements MetricsSender {
       if (responseString.length() > 0) {
         LOG.debug("POST response from server: " + responseString);
       }
-    } catch (MalformedURLException e) {
-      LOG.error("", e);
-    } catch (ProtocolException e) {
-      LOG.error("", e);
     } catch (IOException e) {
       LOG.error("", e);
     } finally {

+ 4 - 2
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java

@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -205,6 +204,9 @@ public class TimelineMetricConfiguration {
   public static final String AGGREGATORS_SKIP_BLOCK_CACHE =
     "timeline.metrics.aggregators.skip.blockcache.enabled";
 
+  public static final String TIMELINE_SERVICE_HTTP_POLICY =
+    "timeline.metrics.service.http.policy";
+
   public static final String DISABLE_METRIC_METADATA_MGMT =
     "timeline.metrics.service.metadata.management.disabled";
 
@@ -262,7 +264,7 @@ public class TimelineMetricConfiguration {
   }
 
   public String getWebappAddress() {
-    String defaultHttpAddress = "0.0.0.0:8188";
+    String defaultHttpAddress = "0.0.0.0:6188";
     if (metricsConf != null) {
       return metricsConf.get(WEBAPP_HTTP_ADDRESS, defaultHttpAddress);
     }

+ 7 - 7
ambari-server/src/main/java/org/apache/ambari/server/configuration/ComponentSSLConfiguration.java

@@ -21,7 +21,7 @@ package org.apache.ambari.server.configuration;
  * Configuration for SSL communication between Ambari and 3rd party services.
  * Currently, the following services are supported with SSL communication:
  * <ul>
- * <li>Ganglia</li>
+ * <li>Ambari metrics</li>
  * </ul>
  */
 public class ComponentSSLConfiguration {
@@ -32,7 +32,7 @@ public class ComponentSSLConfiguration {
   private String truststorePath;
   private String truststorePassword;
   private String truststoreType;
-  private boolean gangliaSSL;
+  private boolean httpsEnabled;
 
   /**
    * The singleton.
@@ -60,7 +60,7 @@ public class ComponentSSLConfiguration {
     truststorePath     = configuration.getProperty(Configuration.SSL_TRUSTSTORE_PATH_KEY);
     truststorePassword = getPassword(configuration);
     truststoreType     = configuration.getProperty(Configuration.SSL_TRUSTSTORE_TYPE_KEY);
-    gangliaSSL         = Boolean.parseBoolean(configuration.getProperty(Configuration.GANGLIA_HTTPS_KEY));
+    httpsEnabled = Boolean.parseBoolean(configuration.getProperty(Configuration.AMRABI_METRICS_HTTPS_ENABLED_KEY));
   }
 
 
@@ -94,12 +94,12 @@ public class ComponentSSLConfiguration {
   }
 
   /**
-   * Indicates whether or not Ganglia is setup for SSL.
+   * Indicates whether or not Ambari Metrics is setup for SSL.
    *
-   * @return true if Ganglia is setup for SSL
+   * @return true if AMS is setup for SSL
    */
-  public boolean isGangliaSSL() {
-    return gangliaSSL;
+  public boolean isHttpsEnabled() {
+    return httpsEnabled;
   }
 
   /**

+ 3 - 3
ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java

@@ -242,7 +242,6 @@ public class Configuration {
   public static final String JAVAX_SSL_TRUSTSTORE = "javax.net.ssl.trustStore";
   public static final String JAVAX_SSL_TRUSTSTORE_PASSWORD = "javax.net.ssl.trustStorePassword";
   public static final String JAVAX_SSL_TRUSTSTORE_TYPE = "javax.net.ssl.trustStoreType";
-  public static final String GANGLIA_HTTPS_KEY = "ganglia.https";
   public static final String SRVR_TWO_WAY_SSL_PORT_DEFAULT = "8441";
   public static final String SRVR_ONE_WAY_SSL_PORT_DEFAULT = "8440";
   public static final String SRVR_CRT_NAME_DEFAULT = "ca.crt";
@@ -483,6 +482,9 @@ public class Configuration {
   private static final String DEFAULT_TIMELINE_METRICS_CACHE_HEAP_PERCENT = "15%";
   private static final String TIMELINE_METRICS_CACHE_USE_CUSTOM_SIZING_ENGINE = "server.timeline.metrics.cache.use.custom.sizing.engine";
 
+  // Timeline Metrics SSL settings
+  public static final String AMRABI_METRICS_HTTPS_ENABLED_KEY = "server.timeline.metrics.https.enabled";
+
   /**
    * Governs the use of {@link Parallel} to process {@link StageEntity}
    * instances into {@link Stage}.
@@ -921,8 +923,6 @@ public class Configuration {
       jsonObject = jsonElement.getAsJsonObject();
     } catch (FileNotFoundException e) {
       throw new IllegalArgumentException("No file " + file, e);
-    } catch (IOException ioe){
-      throw new IllegalArgumentException("Can't read file " + file, ioe);
     }
 
     return jsonObject;

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaPropertyProvider.java

@@ -253,7 +253,7 @@ public abstract class GangliaPropertyProvider extends MetricsPropertyProvider {
     
     URIBuilder uriBuilder = new URIBuilder();
 
-    if (configuration.isGangliaSSL()) {
+    if (configuration.isHttpsEnabled()) {
       uriBuilder.setScheme("https");
     } else {
       uriBuilder.setScheme("http");

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaReportPropertyProvider.java

@@ -209,7 +209,7 @@ public class GangliaReportPropertyProvider extends MetricsReportPropertyProvider
 
     StringBuilder sb = new StringBuilder();
 
-    if (configuration.isGangliaSSL()) {
+    if (configuration.isHttpsEnabled()) {
       sb.append("https://");
     } else {
       sb.append("http://");

+ 4 - 4
ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java

@@ -34,7 +34,6 @@ import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.TemporalInfo;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
-import org.apache.ambari.server.controller.utilities.StreamProvider;
 import org.apache.ambari.server.state.StackId;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
@@ -625,7 +624,8 @@ public abstract class AMSPropertyProvider extends MetricsPropertyProvider {
             if (metricsRequest == null) {
               metricsRequest = new MetricsRequest(temporalInfo,
                 getAMSUriBuilder(collectorHostName,
-                  collectorPort != null ? Integer.parseInt(collectorPort) : COLLECTOR_DEFAULT_PORT),
+                  collectorPort != null ? Integer.parseInt(collectorPort) : COLLECTOR_DEFAULT_PORT,
+                  configuration.isHttpsEnabled()),
                   (String) resource.getPropertyValue(clusterNamePropertyId));
               requests.put(temporalInfo, metricsRequest);
             }
@@ -643,9 +643,9 @@ public abstract class AMSPropertyProvider extends MetricsPropertyProvider {
     return requestMap;
   }
 
-  static URIBuilder getAMSUriBuilder(String hostname, int port) {
+  static URIBuilder getAMSUriBuilder(String hostname, int port, boolean httpsEnabled) {
     URIBuilder uriBuilder = new URIBuilder();
-    uriBuilder.setScheme("http");
+    uriBuilder.setScheme(httpsEnabled ? "https" : "http");
     uriBuilder.setHost(hostname);
     uriBuilder.setPort(port);
     uriBuilder.setPath("/ws/v1/timeline/metrics");
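
A quick illustration (not part of the diff) of what the reworked helper produces. The helper is package-private, so this hypothetical sketch sits in the same package; URIBuilder is assumed to be the Apache HttpClient class already used by the provider, and the host is a placeholder.

package org.apache.ambari.server.controller.metrics.timeline;

import org.apache.http.client.utils.URIBuilder;

class AmsUriBuilderExample {
  static void demo() {
    // httpsEnabled = true  -> https://ams-host.example.com:6188/ws/v1/timeline/metrics
    URIBuilder secure = AMSPropertyProvider.getAMSUriBuilder("ams-host.example.com", 6188, true);
    // httpsEnabled = false -> http://ams-host.example.com:6188/ws/v1/timeline/metrics
    URIBuilder plain = AMSPropertyProvider.getAMSUriBuilder("ams-host.example.com", 6188, false);
  }
}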

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSReportPropertyProvider.java

@@ -174,7 +174,7 @@ public class AMSReportPropertyProvider extends MetricsReportPropertyProvider {
     String host = hostProvider.getCollectorHostName(clusterName, TIMELINE_METRICS);
     String port = hostProvider.getCollectorPort(clusterName, TIMELINE_METRICS);
     URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder(host,
-      port != null ? Integer.parseInt(port) : 8188);
+      port != null ? Integer.parseInt(port) : 6188, configuration.isHttpsEnabled());
 
     for (Map.Entry<String, MetricReportRequest> entry : reportRequestMap.entrySet()) {
       MetricReportRequest reportRequest = entry.getValue();

+ 1 - 0
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/metainfo.xml

@@ -172,6 +172,7 @@
       <configuration-dependencies>
         <config-type>accumulo-env</config-type>
         <config-type>accumulo-site</config-type>
+        <config-type>ams-ssl-client</config-type>
       </configuration-dependencies>
 
     </service>

+ 7 - 0
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py

@@ -131,6 +131,13 @@ if has_metric_collector:
       metric_collector_port = metric_collector_web_address.split(':')[1]
     else:
       metric_collector_port = '6188'
+  if default("/configurations/ams-site/timeline.metrics.service.http.policy", "HTTP_ONLY") == "HTTPS_ONLY":
+    metric_collector_protocol = 'https'
+  else:
+    metric_collector_protocol = 'http'
+  metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
+  metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
+  metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
   pass
 metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
 metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 10)

+ 6 - 1
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/templates/hadoop-metrics2-accumulo.properties.j2

@@ -35,7 +35,12 @@ rpc.collector={{metric_collector_host}}:{{metric_collector_port}}
 accumulo.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 accumulo.sink.timeline.period={{metrics_collection_period}}
 accumulo.sink.timeline.sendInterval={{metrics_report_interval}}000
-accumulo.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
+accumulo.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
+
+# HTTPS properties
+accumulo.sink.timeline.truststore.path = {{metric_truststore_path}}
+accumulo.sink.timeline.truststore.type = {{metric_truststore_type}}
+accumulo.sink.timeline.truststore.password = {{metric_truststore_password}}
 
 {% else %}
 

+ 11 - 1
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml

@@ -484,7 +484,6 @@
       </property>
     </depends-on>
   </property>
-
   <property>
     <name>timeline.metrics.sink.report.interval</name>
     <value>60</value>
@@ -553,5 +552,16 @@
       utilization only for user queries.
     </description>
   </property>
+  <property>
+    <name>timeline.metrics.service.http.policy</name>
+    <value>HTTP_ONLY</value>
+    <description>
+      This configures the HTTP endpoint for Yarn Application History Server for
+      Ambari Metrics System.
+      The following values are supported:
+      - HTTP_ONLY : Service is provided only on http
+      - HTTPS_ONLY : Service is provided only on https
+    </description>
+  </property>
 
 </configuration>

+ 37 - 0
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-ssl-client.xml

@@ -0,0 +1,37 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+    <property>
+        <name>ssl.client.truststore.location</name>
+        <value>/etc/security/clientKeys/all.jks</value>
+        <description>Location of the trust store file.</description>
+    </property>
+    <property>
+        <name>ssl.client.truststore.type</name>
+        <value>jks</value>
+        <description>Optional. Default value is "jks".</description>
+    </property>
+    <property>
+        <name>ssl.client.truststore.password</name>
+        <value>bigdata</value>
+        <property-type>PASSWORD</property-type>
+        <description>Password to open the trust store file.</description>
+    </property>
+</configuration>

+ 64 - 0
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-ssl-server.xml

@@ -0,0 +1,64 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+    <property>
+        <name>ssl.server.truststore.location</name>
+        <value>/etc/security/serverKeys/all.jks</value>
+        <description>Location of the trust store file.</description>
+    </property>
+    <property>
+        <name>ssl.server.truststore.type</name>
+        <value>jks</value>
+        <description>Optional. Default value is "jks".</description>
+    </property>
+    <property>
+        <name>ssl.server.truststore.password</name>
+        <value>bigdata</value>
+        <property-type>PASSWORD</property-type>
+        <description>Password to open the trust store file.</description>
+    </property>
+    <property>
+        <name>ssl.server.truststore.reload.interval</name>
+        <value>10000</value>
+        <description>Truststore reload interval, in milliseconds.</description>
+    </property>
+    <property>
+        <name>ssl.server.keystore.type</name>
+        <value>jks</value>
+        <description>Optional. Default value is "jks".</description>
+    </property>
+    <property>
+        <name>ssl.server.keystore.location</name>
+        <value>/etc/security/serverKeys/keystore.jks</value>
+        <description>Location of the keystore file.</description>
+    </property>
+    <property>
+        <name>ssl.server.keystore.password</name>
+        <value>bigdata</value>
+        <property-type>PASSWORD</property-type>
+        <description>Password to open the keystore file.</description>
+    </property>
+    <property>
+        <name>ssl.server.keystore.keypassword</name>
+        <value>bigdata</value>
+        <property-type>PASSWORD</property-type>
+        <description>Password for private key in keystore file.</description>
+    </property>
+</configuration>

+ 2 - 0
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/metainfo.xml

@@ -155,6 +155,8 @@
         <config-type>ams-hbase-log4j</config-type>
         <config-type>ams-grafana-env</config-type>
         <config-type>ams-grafana-ini</config-type>
+        <config-type>ams-ssl-server</config-type>
+        <config-type>ams-ssl-client</config-type>
       </configuration-dependencies>
 
       <excluded-config-types>

+ 8 - 0
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams.py

@@ -204,6 +204,14 @@ def ams(name=None):
               group=params.user_group
     )
 
+    XmlConfig("ssl-server.xml",
+              conf_dir=params.ams_collector_conf_dir,
+              configurations=params.config['configurations']['ams-ssl-server'],
+              configuration_attributes=params.config['configuration_attributes']['ams-ssl-server'],
+              owner=params.ams_user,
+              group=params.user_group
+    )
+
     merged_ams_hbase_site = {}
     merged_ams_hbase_site.update(params.config['configurations']['ams-hbase-site'])
     if params.security_enabled:

+ 1 - 1
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py

@@ -40,7 +40,7 @@ def create_ams_datasource():
   Logger.info("Connecting (GET) to %s:%s%s" % (params.hostname,
                                                params.ams_grafana_port,
                                                GRAFANA_URL))
-
+# TODO add https support
   conn = httplib.HTTPConnection(params.hostname,
                                 int(params.ams_grafana_port))
 

+ 10 - 0
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py

@@ -50,6 +50,16 @@ ams_pid_dir = status_params.ams_collector_pid_dir
 ams_collector_script = "/usr/sbin/ambari-metrics-collector"
 ams_collector_pid_dir = status_params.ams_collector_pid_dir
 ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", [])
+if default("/configurations/ams-site/timeline.metrics.service.http.policy", "HTTP_ONLY") == "HTTPS_ONLY":
+  metric_collector_https_enabled = True
+  metric_collector_protocol = 'https'
+else:
+  metric_collector_https_enabled = False
+  metric_collector_protocol = 'http'
+metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
+metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
+metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
+
 if 'cluster-env' in config['configurations'] and \
     'metrics_collector_vip_host' in config['configurations']['cluster-env']:
   metric_collector_host = config['configurations']['cluster-env']['metrics_collector_vip_host']

+ 13 - 4
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py

@@ -79,8 +79,9 @@ class AMSServiceCheck(Script):
         Logger.info("Connecting (POST) to %s:%s%s" % (params.metric_collector_host,
                                                       params.metric_collector_port,
                                                       self.AMS_METRICS_POST_URL))
-        conn = httplib.HTTPConnection(params.metric_collector_host,
-                                        int(params.metric_collector_port))
+        conn = self.get_http_connection(params.metric_collector_host,
+                                        int(params.metric_collector_port),
+                                        params.metric_collector_https_enabled)
         conn.request("POST", self.AMS_METRICS_POST_URL, metric_json, headers)
 
         response = conn.getresponse()
@@ -127,8 +128,9 @@ class AMSServiceCheck(Script):
                                                  params.metric_collector_port,
                                               self.AMS_METRICS_GET_URL % encoded_get_metrics_parameters))
 
-    conn = httplib.HTTPConnection(params.metric_collector_host,
-                                  int(params.metric_collector_port))
+    conn = self.get_http_connection(params.metric_collector_host,
+                                    int(params.metric_collector_port),
+                                    params.metric_collector_https_enabled)
     conn.request("GET", self.AMS_METRICS_GET_URL % encoded_get_metrics_parameters)
     response = conn.getresponse()
     Logger.info("Http response: %s %s" % (response.status, response.reason))
@@ -161,6 +163,13 @@ class AMSServiceCheck(Script):
 
     Logger.info("Ambari Metrics service check is finished.")
 
+  def get_http_connection(self, host, port, https_enabled=False):
+    if https_enabled:
+      # TODO verify certificate
+      return httplib.HTTPSConnection(host, port)
+    else:
+      return httplib.HTTPConnection(host, port)
+
 if __name__ == "__main__":
   AMSServiceCheck().execute()
 

+ 6 - 1
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2

@@ -55,9 +55,14 @@ rpc.collector={{metric_collector_host}}:{{metric_collector_port}}
 hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 hbase.sink.timeline.period={{metrics_collection_period}}
 hbase.sink.timeline.sendInterval={{metrics_report_interval}}000
-hbase.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
+hbase.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
 hbase.sink.timeline.serviceName-prefix=ams
 
+# HTTPS properties
+hbase.sink.timeline.truststore.path = {{metric_truststore_path}}
+hbase.sink.timeline.truststore.type = {{metric_truststore_type}}
+hbase.sink.timeline.truststore.password = {{metric_truststore_password}}
+
 # Switch off metrics generation on a per region basis
 *.source.filter.class=org.apache.hadoop.metrics2.filter.GlobFilter
 hbase.*.source.filter.exclude=*Regions*
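
For reference, rendering the changed lines with representative values shows what the sink ends up reading on an HTTPS-only cluster. This uses the jinja2 library directly with sample values (hostname, truststore path), not Ambari's own template machinery:

from jinja2 import Template

snippet = (
    "hbase.sink.timeline.collector="
    "{{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}\n"
    "hbase.sink.timeline.truststore.path = {{metric_truststore_path}}\n"
    "hbase.sink.timeline.truststore.type = {{metric_truststore_type}}\n")

print(Template(snippet).render(
    metric_collector_protocol='https',
    metric_collector_host='c6401.ambari.apache.org',
    metric_collector_port='6188',
    metric_truststore_path='/etc/security/clientKeys/all.jks',
    metric_truststore_type='jks'))
# hbase.sink.timeline.collector=https://c6401.ambari.apache.org:6188
# hbase.sink.timeline.truststore.path = /etc/security/clientKeys/all.jks
# hbase.sink.timeline.truststore.type = jks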

+ 1 - 0
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metric_monitor.ini.j2

@@ -22,6 +22,7 @@ metrics_server = {{metric_collector_host}}:{{metric_collector_port}}
 hostname = {{hostname}}
 enable_time_threshold = false
 enable_value_threshold = false
+https_enabled = {{metric_collector_https_enabled}}
 
 [emitter]
 send_interval = {{metrics_report_interval}}
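
https_enabled is the flag the host monitoring daemon reads at startup. The emitter change itself is not shown in this excerpt, so the snippet below is only an illustration of how a client might honor the flag; the section name and the default ini path are assumptions:

import ConfigParser
import httplib

def open_collector_connection(ini_path='/etc/ambari-metrics-monitor/conf/metric_monitor.ini'):
  # Illustrative sketch: choose HTTP or HTTPS for the metrics POST based on the ini flag.
  parser = ConfigParser.RawConfigParser()
  parser.read(ini_path)
  host, port = parser.get('default', 'metrics_server').split(':')
  if parser.getboolean('default', 'https_enabled'):
    # Certificate verification would still have to be added here.
    return httplib.HTTPSConnection(host, int(port))
  return httplib.HTTPConnection(host, int(port))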

+ 1 - 0
ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/metainfo.xml

@@ -58,6 +58,7 @@
       <configuration-dependencies>
         <config-type>flume-env</config-type>
         <config-type>flume-conf</config-type>
+        <config-type>ams-ssl-client</config-type>
       </configuration-dependencies>
 
     </service>

+ 7 - 0
ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py

@@ -101,6 +101,13 @@ if has_metric_collector:
       metric_collector_port = metric_collector_web_address.split(':')[1]
     else:
       metric_collector_port = '6188'
+  if default("/configurations/ams-site/timeline.metrics.service.http.policy", "HTTP_ONLY") == "HTTPS_ONLY":
+    metric_collector_protocol = 'https'
+  else:
+    metric_collector_protocol = 'http'
+  metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
+  metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
+  metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
   pass
 metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
 metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 10)
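
The same protocol-selection block is repeated in each service's params script. Reduced to a standalone function, it is just a mapping from the ams-site policy value to a URL scheme (the real scripts go through resource_management's default() accessor; a plain dict stands in for it here):

def collector_protocol(ams_site):
  # ams_site: dict of /configurations/ams-site properties.
  policy = ams_site.get('timeline.metrics.service.http.policy', 'HTTP_ONLY')
  return 'https' if policy == 'HTTPS_ONLY' else 'http'

assert collector_protocol({}) == 'http'
assert collector_protocol({'timeline.metrics.service.http.policy': 'HTTPS_ONLY'}) == 'https'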

+ 6 - 2
ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/templates/flume-metrics2.properties.j2

@@ -16,11 +16,15 @@
 # limitations under the License.
 #}
 
-collector={{metric_collector_host}}
-port={{metric_collector_port}}
+collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
 collectionFrequency={{metrics_collection_period}}000
 maxRowCacheSize=10000
 sendInterval={{metrics_report_interval}}000
 
+# HTTPS properties
+truststore.path = {{metric_truststore_path}}
+truststore.type = {{metric_truststore_type}}
+truststore.password = {{metric_truststore_password}}
+
 # Metric names having type COUNTER
 counters=EventTakeSuccessCount,EventPutSuccessCount,EventTakeAttemptCount,EventPutAttemptCount
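
collector and port used to be separate keys; a sink consuming the new single collector value has to split the URI itself. A quick standard-library sketch (Python 2 urlparse), illustrative rather than the sink's actual parsing code:

from urlparse import urlparse

def split_collector(collector):
  # 'https://c6401.ambari.apache.org:6188' -> ('https', 'c6401.ambari.apache.org', 6188)
  parsed = urlparse(collector)
  return parsed.scheme, parsed.hostname, parsed.port

print(split_collector('https://c6401.ambari.apache.org:6188'))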

+ 1 - 0
ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml

@@ -149,6 +149,7 @@
         <config-type>yarn-client</config-type>
         <config-type>hawq-limits-env</config-type>
         <config-type>hawq-sysctl-env</config-type>
+        <config-type>ams-ssl-client</config-type>
       </configuration-dependencies>
     </service>
 

+ 1 - 0
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/metainfo.xml

@@ -147,6 +147,7 @@
         <config-type>ranger-hbase-audit</config-type>
         <config-type>ranger-hbase-policymgr-ssl</config-type>
         <config-type>ranger-hbase-security</config-type>
+        <config-type>ams-ssl-client</config-type>
       </configuration-dependencies>
 
     </service>

+ 8 - 0
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py

@@ -149,6 +149,14 @@ if has_metric_collector:
       metric_collector_port = metric_collector_web_address.split(':')[1]
     else:
       metric_collector_port = '6188'
+  if default("/configurations/ams-site/timeline.metrics.service.http.policy", "HTTP_ONLY") == "HTTPS_ONLY":
+    metric_collector_protocol = 'https'
+  else:
+    metric_collector_protocol = 'http'
+  metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
+  metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
+  metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
+
   pass
 metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
 metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 10)

+ 6 - 1
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2

@@ -66,7 +66,12 @@ rpc.collector={{metric_collector_host}}:{{metric_collector_port}}
 hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 hbase.sink.timeline.period={{metrics_collection_period}}
 hbase.sink.timeline.sendInterval={{metrics_report_interval}}000
-hbase.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
+hbase.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
+
+# HTTPS properties
+hbase.sink.timeline.truststore.path = {{metric_truststore_path}}
+hbase.sink.timeline.truststore.type = {{metric_truststore_type}}
+hbase.sink.timeline.truststore.password = {{metric_truststore_password}}
 
 {% else %}
 

+ 6 - 1
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2

@@ -65,7 +65,12 @@ rpc.collector={{metric_collector_host}}:{{metric_collector_port}}
 hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 hbase.sink.timeline.period={{metrics_collection_period}}
 hbase.sink.timeline.sendInterval={{metrics_report_interval}}000
-hbase.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
+hbase.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
+
+# HTTPS properties
+hbase.sink.timeline.truststore.path = {{metric_truststore_path}}
+hbase.sink.timeline.truststore.type = {{metric_truststore_type}}
+hbase.sink.timeline.truststore.password = {{metric_truststore_password}}
 
 {% else %}
 

+ 1 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml

@@ -245,6 +245,7 @@
         <config-type>ranger-hdfs-audit</config-type>
         <config-type>ranger-hdfs-policymgr-ssl</config-type>
         <config-type>ranger-hdfs-security</config-type>
+        <config-type>ams-ssl-client</config-type>
       </configuration-dependencies>
       <restartRequiredAfterRackChange>true</restartRequiredAfterRackChange>
     </service>

+ 20 - 0
ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml

@@ -324,6 +324,26 @@
     <value>{{metric_collector_port}}</value>
     <description>Timeline port</description>
   </property>
+  <property>
+    <name>kafka.timeline.metrics.protocol</name>
+    <value>{{metric_collector_protocol}}</value>
+    <description>Timeline protocol (http or https)</description>
+  </property>
+  <property>
+    <name>kafka.timeline.metrics.truststore.path</name>
+    <value>{{metric_truststore_path}}</value>
+    <description>Location of the trust store file.</description>
+  </property>
+  <property>
+    <name>kafka.timeline.metrics.truststore.type</name>
+    <value>{{metric_truststore_type}}</value>
+    <description>Optional. Default value is "jks".</description>
+  </property>
+  <property>
+    <name>kafka.timeline.metrics.truststore.password</name>
+    <value>{{metric_truststore_password}}</value>
+    <description>Password to open the trust store file.</description>
+  </property>
   <property>
     <name>kafka.timeline.metrics.reporter.sendInterval</name>
     <value>5900</value>

+ 1 - 0
ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/metainfo.xml

@@ -64,6 +64,7 @@
         <config-type>ranger-kafka-security</config-type>
         <config-type>zookeeper-env</config-type>
         <config-type>zoo.cfg</config-type>
+        <config-type>ams-ssl-client</config-type>
       </configuration-dependencies>
       <restartRequiredAfterChange>true</restartRequiredAfterChange>
     </service>

+ 4 - 0
ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka.py

@@ -74,6 +74,10 @@ def kafka(upgrade_type=None):
     if params.has_metric_collector:
       kafka_server_config['kafka.timeline.metrics.host'] = params.metric_collector_host
       kafka_server_config['kafka.timeline.metrics.port'] = params.metric_collector_port
+      kafka_server_config['kafka.timeline.metrics.protocol'] = params.metric_collector_protocol
+      kafka_server_config['kafka.timeline.metrics.truststore.path'] = params.metric_truststore_path
+      kafka_server_config['kafka.timeline.metrics.truststore.type'] = params.metric_truststore_type
+      kafka_server_config['kafka.timeline.metrics.truststore.password'] = params.metric_truststore_password
 
     kafka_data_dir = kafka_server_config['log.dirs']
     kafka_data_dirs = filter(None, kafka_data_dir.split(","))
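
Because the truststore parameters default to empty strings, a broker configured for an HTTPS-only collector can silently end up without a usable truststore. A hypothetical guard (not part of this patch) that could be run before writing the broker config:

def check_https_config(kafka_server_config):
  # Hypothetical validation: HTTPS without a truststore path will fail at send time.
  if (kafka_server_config.get('kafka.timeline.metrics.protocol') == 'https'
      and not kafka_server_config.get('kafka.timeline.metrics.truststore.path')):
    raise ValueError('kafka.timeline.metrics.protocol is https but no truststore path is set')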

+ 9 - 1
ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py

@@ -106,6 +106,10 @@ else:
 
 metric_collector_host = ""
 metric_collector_port = ""
+metric_collector_protocol = ""
+metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
+metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
+metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
 
 ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", [])
 has_metric_collector = not len(ams_collector_hosts) == 0
@@ -125,6 +129,10 @@ if has_metric_collector:
       metric_collector_port = metric_collector_web_address.split(':')[1]
     else:
       metric_collector_port = '6188'
+  if default("/configurations/ams-site/timeline.metrics.service.http.policy", "HTTP_ONLY") == "HTTPS_ONLY":
+    metric_collector_protocol = 'https'
+  else:
+    metric_collector_protocol = 'http'
   pass
 # Security-related params
 security_enabled = config['configurations']['cluster-env']['security_enabled']
@@ -274,4 +282,4 @@ HdfsResource = functools.partial(
   principal_name = hdfs_principal_name,
   hdfs_site = hdfs_site,
   default_fs = default_fs
-)
+)

+ 1 - 0
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/metainfo.xml

@@ -134,6 +134,7 @@
         <config-type>ranger-admin-site</config-type>
         <config-type>zookeeper-env</config-type>
         <config-type>zoo.cfg</config-type>
+        <config-type>ams-ssl-client</config-type>
       </configuration-dependencies>
       <quickLinksConfigurations>
         <quickLinksConfiguration>

+ 9 - 2
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py

@@ -165,7 +165,14 @@ if has_metric_collector:
 
   metric_collector_report_interval = 60
   metric_collector_app_id = "nimbus"
-
+  if default("/configurations/ams-site/timeline.metrics.service.http.policy", "HTTP_ONLY") == "HTTPS_ONLY":
+    metric_collector_protocol = 'https'
+  else:
+    metric_collector_protocol = 'http'
+  metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
+  metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
+  metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
+  pass
 metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
 metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 10)
 metric_collector_sink_jar = "/usr/lib/storm/lib/ambari-metrics-storm-sink*.jar"
@@ -296,4 +303,4 @@ HdfsResource = functools.partial(
   principal_name = hdfs_principal_name,
   hdfs_site = hdfs_site,
   default_fs = default_fs
-)
+)

+ 6 - 2
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/templates/config.yaml.j2

@@ -57,8 +57,12 @@ enableMetricsSink: True
 metrics_collector:
 
   reportInterval: {{metric_collector_report_interval}}
-  host: "{{metric_collector_host}}"
-  port: {{metric_collector_port}}
+  collector: "{{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}"
   appId: "{{metric_collector_app_id}}"
 
+  # HTTPS settings
+  truststore.path : "{{metric_truststore_path}}"
+  truststore.type : "{{metric_truststore_type}}"
+  truststore.password : "{{metric_truststore_password}}"
+
 {% endif %}
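
The Storm metrics configuration now carries a single collector URI plus truststore settings instead of separate host and port keys. A sketch of what the rendered section looks like to a YAML consumer, with sample values and assuming PyYAML is available:

import yaml

rendered = """
metrics_collector:
  reportInterval: 60
  collector: "https://c6401.ambari.apache.org:6188"
  appId: "nimbus"
  truststore.path: "/etc/security/clientKeys/all.jks"
  truststore.type: "jks"
"""

conf = yaml.safe_load(rendered)['metrics_collector']
print(conf['collector'])
print(conf['truststore.path'])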

+ 7 - 3
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/templates/storm-metrics2.properties.j2

@@ -16,7 +16,11 @@
 # limitations under the License.
 #}
 
-collector={{metric_collector_host}}
-port={{metric_collector_port}}
+collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
 maxRowCacheSize=10000
-sendInterval={{metrics_report_interval}}000
+sendInterval={{metrics_report_interval}}000
+
+# HTTPS properties
+truststore.path = {{metric_truststore_path}}
+truststore.type = {{metric_truststore_type}}
+truststore.password = {{metric_truststore_password}}

+ 2 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/metainfo.xml

@@ -150,6 +150,7 @@
         <config-type>core-site</config-type>
         <config-type>mapred-site</config-type>
         <config-type>yarn-log4j</config-type>
+        <config-type>ams-ssl-client</config-type>
       </configuration-dependencies>
       <widgetsFileName>YARN_widgets.json</widgetsFileName>
       <metricsFileName>YARN_metrics.json</metricsFileName>
@@ -259,6 +260,7 @@
         <config-type>ranger-yarn-audit</config-type>
         <config-type>ranger-yarn-policymgr-ssl</config-type>
         <config-type>ranger-yarn-security</config-type>
+        <config-type>ams-ssl-client</config-type>
       </configuration-dependencies>
       <restartRequiredAfterRackChange>true</restartRequiredAfterRackChange>
       <widgetsFileName>MAPREDUCE2_widgets.json</widgetsFileName>

+ 8 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py

@@ -118,6 +118,14 @@ if has_metric_collector:
       metric_collector_port = metric_collector_web_address.split(':')[1]
     else:
       metric_collector_port = '6188'
+  if default("/configurations/ams-site/timeline.metrics.service.http.policy", "HTTP_ONLY") == "HTTPS_ONLY":
+    metric_collector_protocol = 'https'
+  else:
+    metric_collector_protocol = 'http'
+  metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
+  metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
+  metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
+
   pass
 metrics_report_interval = default("/configurations/ams-site/timeline.metrics.sink.report.interval", 60)
 metrics_collection_period = default("/configurations/ams-site/timeline.metrics.sink.collection.period", 10)

+ 15 - 10
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hadoop-metrics2.properties.j2

@@ -73,16 +73,21 @@ resourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue
 *.sink.timeline.sendInterval={{metrics_report_interval}}000
 *.sink.timeline.slave.host.name = {{hostname}}
 
-datanode.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
-namenode.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
-resourcemanager.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
-nodemanager.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
-historyserver.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
-journalnode.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
-nimbus.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
-supervisor.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
-maptask.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
-reducetask.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
+# HTTPS properties
+*.sink.timeline.truststore.path = {{metric_truststore_path}}
+*.sink.timeline.truststore.type = {{metric_truststore_type}}
+*.sink.timeline.truststore.password = {{metric_truststore_password}}
+
+datanode.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
+namenode.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
+resourcemanager.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
+nodemanager.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
+historyserver.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
+journalnode.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
+nimbus.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
+supervisor.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
+maptask.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
+reducetask.sink.timeline.collector={{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}
 
 resourcemanager.sink.timeline.tagsForPrefix.yarn=Queue
 

+ 3 - 3
ambari-server/src/test/java/org/apache/ambari/server/configuration/ComponentSSLConfigurationTest.java

@@ -29,12 +29,12 @@ import org.junit.Test;
 public class ComponentSSLConfigurationTest {
 
   public static ComponentSSLConfiguration getConfiguration(String path,
-      String pass, String type, boolean gangliaSSL) {
+      String pass, String type, boolean isSslEnabled) {
     Properties ambariProperties = new Properties();
     ambariProperties.setProperty(Configuration.SSL_TRUSTSTORE_PATH_KEY, path);
     ambariProperties.setProperty(Configuration.SSL_TRUSTSTORE_PASSWORD_KEY, pass);
     ambariProperties.setProperty(Configuration.SSL_TRUSTSTORE_TYPE_KEY, type);
-    ambariProperties.setProperty(Configuration.GANGLIA_HTTPS_KEY, Boolean.toString(gangliaSSL));
+    ambariProperties.setProperty(Configuration.AMRABI_METRICS_HTTPS_ENABLED_KEY, Boolean.toString(isSslEnabled));
 
     Configuration configuration =  new TestConfiguration(ambariProperties);
 
@@ -70,7 +70,7 @@ public class ComponentSSLConfigurationTest {
   public void testIsGangliaSSL() throws Exception {
     ComponentSSLConfiguration sslConfiguration = getConfiguration("tspath",
         "tspass", "tstype", true);
-    Assert.assertTrue(sslConfiguration.isGangliaSSL());
+    Assert.assertTrue(sslConfiguration.isHttpsEnabled());
   }
 
   private static class TestConfiguration extends Configuration {

+ 13 - 13
ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaPropertyProviderTest.java

@@ -220,7 +220,7 @@ public class GangliaPropertyProviderTest {
     Assert.assertEquals(1, propertyProvider.populateResources(Collections.singleton(resource), request, null).size());
 
 
-    String expected = (configuration.isGangliaSSL() ? "https" : "http") +
+    String expected = (configuration.isHttpsEnabled() ? "https" : "http") +
         "://domU-12-31-39-0E-34-E1.compute-1.internal/cgi-bin/rrd.py?c=HDPDataNode%2CHDPSlaves&h=domU-12-31-39-0E-34-E1.compute-1.internal&m=jvm.metrics.gcCount&s=10&e=20&r=1";
     Assert.assertEquals(expected, streamProvider.getLastSpec());
 
@@ -269,7 +269,7 @@ public class GangliaPropertyProviderTest {
     
     URIBuilder expectedUri = new URIBuilder();
 
-    expectedUri.setScheme((configuration.isGangliaSSL() ? "https" : "http"));
+    expectedUri.setScheme((configuration.isHttpsEnabled() ? "https" : "http"));
     expectedUri.setHost("domU-12-31-39-0E-34-E1.compute-1.internal");
     expectedUri.setPath("/cgi-bin/rrd.py");
     expectedUri.setParameter("c", "HDPTaskTracker,HDPSlaves");
@@ -418,7 +418,7 @@ public class GangliaPropertyProviderTest {
     
     URIBuilder uriBuilder = new URIBuilder();
 
-    uriBuilder.setScheme((configuration.isGangliaSSL() ? "https" : "http"));
+    uriBuilder.setScheme((configuration.isHttpsEnabled() ? "https" : "http"));
     uriBuilder.setHost("domU-12-31-39-0E-34-E1.compute-1.internal");
     uriBuilder.setPath("/cgi-bin/rrd.py");
     uriBuilder.setParameter("c", "HDPJobTracker,HDPHBaseMaster,HDPResourceManager,HDPFlumeServer,HDPSlaves,HDPHistoryServer,HDPJournalNode,HDPTaskTracker,HDPHBaseRegionServer,HDPNameNode");
@@ -435,7 +435,7 @@ public class GangliaPropertyProviderTest {
         "HDPSlaves", "HDPHistoryServer", "HDPJournalNode", "HDPTaskTracker", "HDPHBaseRegionServer", "HDPNameNode"});
     List<String> hosts = Arrays.asList(new String[]{"domU-12-31-39-0E-34-E3.compute-1.internal", "domU-12-31-39-0E-34-E1.compute-1.internal",
         "domU-12-31-39-0E-34-E2.compute-1.internal"});
-    int httpsVariation = configuration.isGangliaSSL() ? 1 : 0;
+    int httpsVariation = configuration.isHttpsEnabled() ? 1 : 0;
 
     Assert.assertEquals(expected.substring(0, 66 + httpsVariation), streamProvider.getLastSpec().substring(0, 66 + httpsVariation));
     Assert.assertTrue(CollectionPresentationUtils.isStringPermutationOfCollection(streamProvider.getLastSpec().substring(66 + httpsVariation, 236 + httpsVariation), components, "%2C", 0, 0));
@@ -487,7 +487,7 @@ public class GangliaPropertyProviderTest {
     
     URIBuilder expectedUri = new URIBuilder();
     
-    expectedUri.setScheme((configuration.isGangliaSSL() ? "https" : "http"));
+    expectedUri.setScheme((configuration.isHttpsEnabled() ? "https" : "http"));
     expectedUri.setHost("domU-12-31-39-0E-34-E1.compute-1.internal");
     expectedUri.setPath("/cgi-bin/rrd.py");
     expectedUri.setParameter("c", "HDPJobTracker,HDPHBaseMaster,HDPResourceManager,HDPFlumeServer,HDPSlaves,HDPHistoryServer,HDPJournalNode,HDPTaskTracker,HDPHBaseRegionServer,HDPNameNode");
@@ -543,7 +543,7 @@ public class GangliaPropertyProviderTest {
     
     URIBuilder expectedUri = new URIBuilder();
     
-    expectedUri.setScheme((configuration.isGangliaSSL() ? "https" : "http"));
+    expectedUri.setScheme((configuration.isHttpsEnabled() ? "https" : "http"));
     expectedUri.setHost("domU-12-31-39-0E-34-E1.compute-1.internal");
     expectedUri.setPath("/cgi-bin/rrd.py");
     expectedUri.setParameter("c", "HDPFlumeServer,HDPSlaves");
@@ -606,7 +606,7 @@ public class GangliaPropertyProviderTest {
     
     URIBuilder expectedUri = new URIBuilder();
 
-    expectedUri.setScheme((configuration.isGangliaSSL() ? "https" : "http"));
+    expectedUri.setScheme((configuration.isHttpsEnabled() ? "https" : "http"));
     expectedUri.setHost("domU-12-31-39-0E-34-E1.compute-1.internal");
     expectedUri.setPath("/cgi-bin/rrd.py");
     expectedUri.setParameter("c", "HDPFlumeServer,HDPSlaves");
@@ -653,12 +653,12 @@ public class GangliaPropertyProviderTest {
 
     Assert.assertEquals(1, propertyProvider.populateResources(Collections.singleton(resource), request, null).size());
 
-    String expected = (configuration.isGangliaSSL() ? "https" : "http") +
+    String expected = (configuration.isHttpsEnabled() ? "https" : "http") +
         "://domU-12-31-39-0E-34-E1.compute-1.internal/cgi-bin/rrd.py?c=HDPFlumeServer%2CHDPSlaves&h=ip-10-39-113-33.ec2.internal&m=";
 
     // Depends on hashing, string representation can be different
     List<String> components = Arrays.asList(new String[]{"HDPFlumeServer", "HDPSlaves"});
-    int httpsVariation = configuration.isGangliaSSL() ? 1 : 0;
+    int httpsVariation = configuration.isHttpsEnabled() ? 1 : 0;
 
     Assert.assertEquals(expected.substring(0, 66 + httpsVariation), streamProvider.getLastSpec().substring(0, 66 + httpsVariation));
     Assert.assertTrue(CollectionPresentationUtils.isStringPermutationOfCollection(streamProvider.getLastSpec().substring(66 + httpsVariation, 92 + httpsVariation), components, "%2C", 0, 0));
@@ -704,7 +704,7 @@ public class GangliaPropertyProviderTest {
     
     URIBuilder expectedUri = new URIBuilder();
 
-    expectedUri.setScheme((configuration.isGangliaSSL() ? "https" : "http"));
+    expectedUri.setScheme((configuration.isHttpsEnabled() ? "https" : "http"));
     expectedUri.setHost("domU-12-31-39-0E-34-E1.compute-1.internal");
     expectedUri.setPath("/cgi-bin/rrd.py");
     expectedUri.setParameter("c", "HDPFlumeServer,HDPSlaves");
@@ -762,7 +762,7 @@ public class GangliaPropertyProviderTest {
     
     URIBuilder expectedUri = new URIBuilder();
 
-    expectedUri.setScheme((configuration.isGangliaSSL() ? "https" : "http"));
+    expectedUri.setScheme((configuration.isHttpsEnabled() ? "https" : "http"));
     expectedUri.setHost("domU-12-31-39-0E-34-E1.compute-1.internal");
     expectedUri.setPath("/cgi-bin/rrd.py");
     expectedUri.setParameter("c", "HDPFlumeServer,HDPSlaves");
@@ -821,7 +821,7 @@ public class GangliaPropertyProviderTest {
     
     URIBuilder expectedUri = new URIBuilder();
 
-    expectedUri.setScheme((configuration.isGangliaSSL() ? "https" : "http"));
+    expectedUri.setScheme((configuration.isHttpsEnabled() ? "https" : "http"));
     expectedUri.setHost("domU-12-31-39-0E-34-E1.compute-1.internal");
     expectedUri.setPath("/cgi-bin/rrd.py");
     expectedUri.setParameter("c", "HDPFlumeServer,HDPSlaves");
@@ -880,7 +880,7 @@ public class GangliaPropertyProviderTest {
     
     URIBuilder expectedUri = new URIBuilder();
 
-    expectedUri.setScheme((configuration.isGangliaSSL() ? "https" : "http"));
+    expectedUri.setScheme((configuration.isHttpsEnabled() ? "https" : "http"));
     expectedUri.setHost("domU-12-31-39-0E-34-E1.compute-1.internal");
     expectedUri.setPath("/cgi-bin/rrd.py");
     expectedUri.setParameter("c", "HDPFlumeServer,HDPSlaves");

+ 1 - 1
ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaReportPropertyProviderTest.java

@@ -95,7 +95,7 @@ public class GangliaReportPropertyProviderTest {
 
     Assert.assertEquals(1, propertyProvider.populateResources(Collections.singleton(resource), request, null).size());
 
-    String expected = (configuration.isGangliaSSL() ? "https" : "http") + "://domU-12-31-39-0E-34-E1.compute-1.internal/ganglia/graph.php?g=load_report&json=1";
+    String expected = (configuration.isHttpsEnabled() ? "https" : "http") + "://domU-12-31-39-0E-34-E1.compute-1.internal/ganglia/graph.php?g=load_report&json=1";
     Assert.assertEquals(expected, streamProvider.getLastSpec());
 
     Assert.assertEquals(2, PropertyHelper.getProperties(resource).size());

+ 15 - 15
ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProviderTest.java

@@ -215,7 +215,7 @@ public class AMSPropertyProviderTest {
     Resource res = resources.iterator().next();
     Map<String, Object> properties = PropertyHelper.getProperties(resources.iterator().next());
     Assert.assertNotNull(properties);
-    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder.addParameter("metricNames", "cpu_user");
     uriBuilder.addParameter("hostname", "h1");
     uriBuilder.addParameter("appId", "HOST");
@@ -264,7 +264,7 @@ public class AMSPropertyProviderTest {
     Resource res = resources.iterator().next();
     Map<String, Object> properties = PropertyHelper.getProperties(res);
     Assert.assertNotNull(properties);
-    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder.addParameter("metricNames", "cpu_user");
     uriBuilder.addParameter("hostname", "h1");
     uriBuilder.addParameter("appId", "HOST");
@@ -309,12 +309,12 @@ public class AMSPropertyProviderTest {
     Resource res = resources.iterator().next();
     Map<String, Object> properties = PropertyHelper.getProperties(resources.iterator().next());
     Assert.assertNotNull(properties);
-    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder.addParameter("metricNames", "cpu_user,mem_free");
     uriBuilder.addParameter("hostname", "h1");
     uriBuilder.addParameter("appId", "HOST");
 
-    URIBuilder uriBuilder2 = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder2 = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder2.addParameter("metricNames", "mem_free,cpu_user");
     uriBuilder2.addParameter("hostname", "h1");
     uriBuilder2.addParameter("appId", "HOST");
@@ -367,14 +367,14 @@ public class AMSPropertyProviderTest {
     Resource res = resources.iterator().next();
     Map<String, Object> properties = PropertyHelper.getProperties(resources.iterator().next());
     Assert.assertNotNull(properties);
-    URIBuilder uriBuilder1 = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder1 = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder1.addParameter("metricNames", "cpu_user,mem_free");
     uriBuilder1.addParameter("hostname", "h1");
     uriBuilder1.addParameter("appId", "HOST");
     uriBuilder1.addParameter("startTime", "1416445244701");
     uriBuilder1.addParameter("endTime", "1416448936564");
 
-    URIBuilder uriBuilder2 = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder2 = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder2.addParameter("metricNames", "mem_free,cpu_user");
     uriBuilder2.addParameter("hostname", "h1");
     uriBuilder2.addParameter("appId", "HOST");
@@ -435,7 +435,7 @@ public class AMSPropertyProviderTest {
     Resource res = resources.iterator().next();
     Map<String, Object> properties = PropertyHelper.getProperties(resources.iterator().next());
     Assert.assertNotNull(properties);
-    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder.addParameter("metricNames", "yarn.QueueMetrics.Queue=root.AvailableMB");
     uriBuilder.addParameter("appId", "RESOURCEMANAGER");
     uriBuilder.addParameter("startTime", "1416528759233");
@@ -484,7 +484,7 @@ public class AMSPropertyProviderTest {
     Resource res = resources.iterator().next();
     Map<String, Object> properties = PropertyHelper.getProperties(resources.iterator().next());
     Assert.assertNotNull(properties);
-    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder.addParameter("metricNames", "rpc.rpc.RpcQueueTimeAvgTime");
     uriBuilder.addParameter("appId", "NAMENODE");
     uriBuilder.addParameter("startTime", "1416528759233");
@@ -558,7 +558,7 @@ public class AMSPropertyProviderTest {
     Resource res = resources.iterator().next();
     Map<String, Object> properties = PropertyHelper.getProperties(resources.iterator().next());
     Assert.assertNotNull(properties);
-    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder.addParameter("metricNames", "regionserver.Server.totalRequestCount");
     uriBuilder.addParameter("appId", "AMS-HBASE");
     uriBuilder.addParameter("startTime", "1421694000");
@@ -631,7 +631,7 @@ public class AMSPropertyProviderTest {
     Resource res = resources.iterator().next();
     Map<String, Object> properties = PropertyHelper.getProperties(resources.iterator().next());
     Assert.assertNotNull(properties);
-    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder.addParameter("metricNames", "rpc.rpc.NumOpenConnections._sum");
     uriBuilder.addParameter("appId", "HBASE");
     uriBuilder.addParameter("startTime", "1429824611300");
@@ -675,7 +675,7 @@ public class AMSPropertyProviderTest {
     Resource res = resources.iterator().next();
     Map<String, Object> properties = PropertyHelper.getProperties(resources.iterator().next());
     Assert.assertNotNull(properties);
-    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder.addParameter("metricNames", "cpu_user");
     uriBuilder.addParameter("hostname", "h1");
     uriBuilder.addParameter("appId", "HOST");
@@ -771,7 +771,7 @@ public class AMSPropertyProviderTest {
     Assert.assertNotNull(hostMetricSpec);
     Assert.assertNotNull(hostComponentMetricsSpec);
     // Verify calls
-    URIBuilder uriBuilder1 = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder1 = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder1.addParameter("metricNames", "dfs.datanode.BlocksReplicated");
     uriBuilder1.addParameter("hostname", "h1");
     uriBuilder1.addParameter("appId", "DATANODE");
@@ -779,7 +779,7 @@ public class AMSPropertyProviderTest {
     uriBuilder1.addParameter("endTime", "1416448936464");
     Assert.assertEquals(uriBuilder1.toString(), hostComponentMetricsSpec);
 
-    URIBuilder uriBuilder2 = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder2 = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder2.addParameter("metricNames", "cpu_user");
     uriBuilder2.addParameter("hostname", "h1");
     uriBuilder2.addParameter("appId", "HOST");
@@ -904,7 +904,7 @@ public class AMSPropertyProviderTest {
     Set<String> specs = streamProvider.getAllSpecs();
     Assert.assertEquals(2, specs.size());
 
-    URIBuilder uriBuilder1 = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder1 = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     Number[][] val;
 
     for (String spec : specs) {
@@ -958,7 +958,7 @@ public class AMSPropertyProviderTest {
 
     @Override
     public String getCollectorPort(String clusterName, MetricsService service) throws SystemException {
-      return "8188";
+      return "6188";
     }
 
     @Override
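
Two things change throughout these tests: getAMSUriBuilder takes an https flag, and the expected collector port moves from 8188 to 6188. A rough Python equivalent of the URI the builder is expected to produce (the /ws/v1/timeline/metrics path is assumed from the AMS collector API; it is not shown in this diff):

def ams_metrics_uri(host, port, https_enabled):
  # Scheme follows the https flag; path is the collector's timeline metrics endpoint.
  scheme = 'https' if https_enabled else 'http'
  return '%s://%s:%s/ws/v1/timeline/metrics' % (scheme, host, port)

print(ams_metrics_uri('localhost', 6188, False))
# http://localhost:6188/ws/v1/timeline/metrics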

+ 2 - 2
ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/AMSReportPropertyProviderTest.java

@@ -93,7 +93,7 @@ public class AMSReportPropertyProviderTest {
     Resource res = resources.iterator().next();
     Map<String, Object> properties = PropertyHelper.getProperties(resources.iterator().next());
     Assert.assertNotNull(properties);
-    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder.addParameter("metricNames", "cpu_user");
     uriBuilder.addParameter("appId", "HOST");
     uriBuilder.addParameter("startTime", "1416445244800");
@@ -136,7 +136,7 @@ public class AMSReportPropertyProviderTest {
     Resource res = resources.iterator().next();
     Map<String, Object> properties = PropertyHelper.getProperties(resources.iterator().next());
     Assert.assertNotNull(properties);
-    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 8188);
+    URIBuilder uriBuilder = AMSPropertyProvider.getAMSUriBuilder("localhost", 6188, false);
     uriBuilder.addParameter("metricNames", "cpu_user._sum");
     uriBuilder.addParameter("appId", "HOST");
     uriBuilder.addParameter("startTime", "1432033257812");

+ 8 - 0
ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py

@@ -131,6 +131,14 @@ class TestMetricsCollector(RMFTestCase):
                               configurations = self.getConfig()['configurations']['ams-site'],
                               configuration_attributes = self.getConfig()['configuration_attributes']['ams-hbase-site']
     )
+
+    self.assertResourceCalled('XmlConfig', 'ssl-server.xml',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              conf_dir = '/etc/ambari-metrics-collector/conf',
+                              configurations = self.getConfig()['configurations']['ams-ssl-server'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['ams-ssl-server']
+    )
     merged_ams_hbase_site = {}
     merged_ams_hbase_site.update(self.getConfig()['configurations']['ams-hbase-site'])
     merged_ams_hbase_site['phoenix.query.maxGlobalMemoryPercentage'] = '25'

File diff suppressed because it is too large
+ 406 - 408
ambari-server/src/test/python/stacks/2.0.6/configs/default.json


+ 7 - 0
ambari-server/src/test/python/stacks/2.0.6/configs/default_ams_embedded.json

@@ -811,6 +811,9 @@
         "ams-hbase-log4j": {
             "content": "\n"
         },
+        "ams-ssl-server": {
+            "content": "\n"
+        },
         "ams-site": {
             "timeline.metrics.host.aggregator.minute.ttl": "604800",
             "timeline.metrics.cluster.aggregator.daily.checkpointCutOffMultiplier": "1",
@@ -859,6 +862,7 @@
         "ams-hbase-site": {},
         "ams-hbase-policy": {},
         "ams-hbase-log4j": {},
+        "ams-ssl-server": {},
         "ams-site": {},
         "yarn-site": {
         "final": {
@@ -941,6 +945,9 @@
         "ams-site": {
             "tag": "version1"
         },
+        "ams-ssl-server": {
+            "tag": "version1"
+        },
         "ams-hbase-policy": {
             "tag": "version1"
         },

Some files were not shown because too many files changed in this diff