@@ -21,8 +21,6 @@ limitations under the License.
import os
import socket
import sys
-import re
-import json

from ambari_agent.AlertSchedulerHandler import AlertSchedulerHandler
from ambari_agent.alerts.collector import AlertCollector
@@ -31,6 +29,7 @@ from ambari_agent.alerts.port_alert import PortAlert
from ambari_agent.alerts.script_alert import ScriptAlert
from ambari_agent.alerts.web_alert import WebAlert
from ambari_agent.apscheduler.scheduler import Scheduler
+from ambari_agent.ClusterConfiguration import ClusterConfiguration

from collections import namedtuple
from mock.mock import MagicMock, patch
@@ -42,7 +41,6 @@ class TestAlerts(TestCase):
    # save original open() method for later use
    self.original_open = open

-
  def tearDown(self):
    sys.stdout == sys.__stdout__

@@ -55,7 +53,12 @@ class TestAlerts(TestCase):
    test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
    test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

-    ash = AlertSchedulerHandler(test_file_path, test_stack_path, test_common_services_path, test_host_scripts_path, None)
+    cluster_configuration = self.__get_cluster_configuration()
+
+    ash = AlertSchedulerHandler(test_file_path, test_stack_path,
+      test_common_services_path, test_host_scripts_path, cluster_configuration,
+      None)
+
    ash.start()

    self.assertTrue(aps_add_interval_job_mock.called)
@@ -64,13 +67,7 @@ class TestAlerts(TestCase):
  @patch('time.time')
  @patch.object(socket.socket,"connect")
  def test_port_alert(self, socket_connect_mock, time_mock):
-    # called 3x with 3 calls per alert
-    # - 900ms and then a time.time() for the date from base_alert
-    # - 2000ms and then a time.time() for the date from base_alert
-    # - socket.timeout to simulate a timeout and then a time.time() for the date from base_alert
-    time_mock.side_effect = [0,900,336283200000,0,2000,336283200000,socket.timeout,336283200000]
-
-    json = { "name": "namenode_process",
+    definition_json = { "name": "namenode_process",
      "service": "HDFS",
      "component": "NAMENODE",
      "label": "NameNode process",
@@ -98,26 +95,39 @@ class TestAlerts(TestCase):
      }
    }

+    configuration = { 'hdfs-site' : { 'my-key': 'value1' } }
+
    collector = AlertCollector()
+    cluster_configuration = self.__get_cluster_configuration()
+    self.__update_cluster_configuration(cluster_configuration, configuration)
+
+    # called 3x with 3 calls per alert
+    # - 900ms and then a time.time() for the date from base_alert
+    # - 2000ms and then a time.time() for the date from base_alert
+    # - socket.timeout to simulate a timeout and then a time.time() for the date from base_alert
+    time_mock.side_effect = [0,900,336283000000,
+      0,2000,336283100000,
+      socket.timeout,336283200000]

-    pa = PortAlert(json, json['source'])
-    pa.set_helpers(collector, {'hdfs-site/my-key': 'value1'})
-    self.assertEquals(6, pa.interval())
+    alert = PortAlert(definition_json, definition_json['source'])
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
+    self.assertEquals(6, alert.interval())

    # 900ms is OK
-    pa.collect()
+    alert.collect()
    alerts = collector.alerts()
    self.assertEquals(0, len(collector.alerts()))
    self.assertEquals('OK', alerts[0]['state'])

    # 2000ms is WARNING
-    pa.collect()
+    alert.collect()
    alerts = collector.alerts()
    self.assertEquals(0, len(collector.alerts()))
    self.assertEquals('WARNING', alerts[0]['state'])

    # throws a socket.timeout exception, causes a CRITICAL
-    pa.collect()
+    alert.collect()
    alerts = collector.alerts()
    self.assertEquals(0, len(collector.alerts()))
    self.assertEquals('CRITICAL', alerts[0]['state'])
@@ -125,7 +135,7 @@ class TestAlerts(TestCase):

  @patch.object(socket.socket,"connect")
  def test_port_alert_complex_uri(self, socket_connect_mock):
-    json = { "name": "namenode_process",
+    definition_json = { "name": "namenode_process",
      "service": "HDFS",
      "component": "NAMENODE",
      "label": "NameNode process",
@@ -148,16 +158,24 @@ class TestAlerts(TestCase):
      }
    }

+    configuration = {'hdfs-site' :
+      { 'my-key': 'c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181,c6403.ambari.apache.org:2181'}
+    }
+
    collector = AlertCollector()
+    cluster_configuration = self.__get_cluster_configuration()
+    self.__update_cluster_configuration(cluster_configuration, configuration)

-    pa = PortAlert(json, json['source'])
+    alert = PortAlert(definition_json, definition_json['source'])
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6402.ambari.apache.org")

    # use a URI that has commas to verify that we properly parse it
-    pa.set_helpers(collector, {'hdfs-site/my-key': 'c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181,c6403.ambari.apache.org:2181'})
-    pa.host_name = 'c6402.ambari.apache.org'
-    self.assertEquals(6, pa.interval())
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
+    self.assertEquals(6, alert.interval())

-    pa.collect()
+    alert.collect()

    alerts = collector.alerts()
    self.assertEquals(0, len(collector.alerts()))
@@ -168,7 +186,7 @@ class TestAlerts(TestCase):


  def test_port_alert_no_sub(self):
-    json = { "name": "namenode_process",
+    definition_json = { "name": "namenode_process",
      "service": "HDFS",
      "component": "NAMENODE",
      "label": "NameNode process",
@@ -191,16 +209,19 @@ class TestAlerts(TestCase):
      }
    }

-    pa = PortAlert(json, json['source'])
-    pa.set_helpers(AlertCollector(), '')
-    self.assertEquals('http://c6401.ambari.apache.org', pa.uri)
+    cluster_configuration = self.__get_cluster_configuration()
+
+    alert = PortAlert(definition_json, definition_json['source'])
+    alert.set_helpers(AlertCollector(), cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")

-    pa.collect()
+    self.assertEquals('http://c6401.ambari.apache.org', alert.uri)
+
+    alert.collect()


-  @patch.object(re, 'match', new = MagicMock())
  def test_script_alert(self):
-    json = {
+    definition_json = {
      "name": "namenode_process",
      "service": "HDFS",
      "component": "NAMENODE",
@@ -216,19 +237,28 @@ class TestAlerts(TestCase):
    }

    # normally set by AlertSchedulerHandler
-    json['source']['stacks_directory'] = os.path.join('ambari_agent', 'dummy_files')
-    json['source']['common_services_directory'] = os.path.join('ambari_agent', 'common-services')
-    json['source']['host_scripts_directory'] = os.path.join('ambari_agent', 'host_scripts')
+    definition_json['source']['stacks_directory'] = os.path.join('ambari_agent', 'dummy_files')
+    definition_json['source']['common_services_directory'] = os.path.join('ambari_agent', 'common-services')
+    definition_json['source']['host_scripts_directory'] = os.path.join('ambari_agent', 'host_scripts')
+
+    configuration = {'foo-site' :
+      { 'bar': 'rendered-bar', 'baz' : 'rendered-baz' }
+    }

    collector = AlertCollector()
-    sa = ScriptAlert(json, json['source'], MagicMock())
-    sa.set_helpers(collector, {'foo-site/bar': 'rendered-bar', 'foo-site/baz':'rendered-baz'} )
-    self.assertEquals(json['source']['path'], sa.path)
-    self.assertEquals(json['source']['stacks_directory'], sa.stacks_dir)
-    self.assertEquals(json['source']['common_services_directory'], sa.common_services_dir)
-    self.assertEquals(json['source']['host_scripts_directory'], sa.host_scripts_dir)
+    cluster_configuration = self.__get_cluster_configuration()
+    self.__update_cluster_configuration(cluster_configuration, configuration)
+
+    alert = ScriptAlert(definition_json, definition_json['source'], MagicMock())
+    alert.set_helpers(collector, cluster_configuration )
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
+
+    self.assertEquals(definition_json['source']['path'], alert.path)
+    self.assertEquals(definition_json['source']['stacks_directory'], alert.stacks_dir)
+    self.assertEquals(definition_json['source']['common_services_directory'], alert.common_services_dir)
+    self.assertEquals(definition_json['source']['host_scripts_directory'], alert.host_scripts_dir)

-    sa.collect()
+    alert.collect()

    alerts = collector.alerts()
    self.assertEquals(0, len(collector.alerts()))
@@ -239,7 +269,7 @@ class TestAlerts(TestCase):

  @patch.object(MetricAlert, "_load_jmx")
  def test_metric_alert(self, ma_load_jmx_mock):
-    json = {
+    definition_json = {
      "name": "cpu_check",
      "service": "HDFS",
      "component": "NAMENODE",
@@ -278,10 +308,19 @@ class TestAlerts(TestCase):

    ma_load_jmx_mock.return_value = [1, 3]

+    configuration = {'hdfs-site' :
+      { 'dfs.datanode.http.address': '1.2.3.4:80'}
+    }
+
    collector = AlertCollector()
-    ma = MetricAlert(json, json['source'])
-    ma.set_helpers(collector, {'hdfs-site/dfs.datanode.http.address': '1.2.3.4:80'})
-    ma.collect()
+    cluster_configuration = self.__get_cluster_configuration()
+    self.__update_cluster_configuration(cluster_configuration, configuration)
+
+    alert = MetricAlert(definition_json, definition_json['source'])
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
+
+    alert.collect()

    alerts = collector.alerts()
    self.assertEquals(0, len(collector.alerts()))
@@ -289,11 +328,14 @@ class TestAlerts(TestCase):
    self.assertEquals('CRITICAL', alerts[0]['state'])
    self.assertEquals('(Unit Tests) crit_arr: 1 3 223', alerts[0]['text'])

-    del json['source']['jmx']['value']
+    del definition_json['source']['jmx']['value']
    collector = AlertCollector()
-    ma = MetricAlert(json, json['source'])
-    ma.set_helpers(collector, {'hdfs-site/dfs.datanode.http.address': '1.2.3.4:80'})
-    ma.collect()
+
+    alert = MetricAlert(definition_json, definition_json['source'])
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
+
+    alert.collect()

    alerts = collector.alerts()
    self.assertEquals(0, len(collector.alerts()))
@@ -304,7 +346,7 @@ class TestAlerts(TestCase):

  @patch.object(MetricAlert, "_load_jmx")
  def test_alert_uri_structure(self, ma_load_jmx_mock):
-    json = {
+    definition_json = {
      "name": "cpu_check",
      "service": "HDFS",
      "component": "NAMENODE",
@@ -349,52 +391,81 @@ class TestAlerts(TestCase):
    # run the alert without specifying any keys; an exception should be thrown
    # indicating that there was no URI and the result is UNKNOWN
    collector = AlertCollector()
-    ma = MetricAlert(json, json['source'])
-    ma.set_helpers(collector, '')
-    ma.collect()
+    cluster_configuration = self.__get_cluster_configuration()
+    alert = MetricAlert(definition_json, definition_json['source'])
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
+    alert.collect()

    self.assertEquals('UNKNOWN', collector.alerts()[0]['state'])

-    # set 2 properties that make no sense wihtout the main URI properties
+    # set properties that make no sense without the main URI properties
+    configuration = {'hdfs-site' :
+      { 'dfs.http.policy' : 'HTTP_ONLY'}
+    }
+
    collector = AlertCollector()
-    ma = MetricAlert(json, json['source'])
-    ma.set_helpers(collector, {'hdfs-site/dfs.http.policy': 'HTTP_ONLY'})
-    ma.collect()
+    cluster_configuration = self.__get_cluster_configuration()
+    self.__update_cluster_configuration(cluster_configuration, configuration)
+
+    alert = MetricAlert(definition_json, definition_json['source'])
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
+    alert.collect()

    self.assertEquals('UNKNOWN', collector.alerts()[0]['state'])

    # set an actual property key (http)
+    configuration = {'hdfs-site' :
+      { 'dfs.http.policy' : 'HTTP_ONLY', 'dfs.datanode.http.address' : '1.2.3.4:80' }
+    }
+
+    self.__update_cluster_configuration(cluster_configuration, configuration)
+
    collector = AlertCollector()
-    ma = MetricAlert(json, json['source'])
-    ma.set_helpers(collector, {'hdfs-site/dfs.datanode.http.address': '1.2.3.4:80',
-      'hdfs-site/dfs.http.policy': 'HTTP_ONLY'})
-    ma.collect()
+    alert = MetricAlert(definition_json, definition_json['source'])
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
+    alert.collect()

    self.assertEquals('OK', collector.alerts()[0]['state'])

    # set an actual property key (https)
+    configuration = {'hdfs-site' :
+      { 'dfs.http.policy' : 'HTTP_ONLY', 'dfs.datanode.https.address' : '1.2.3.4:443' }
+    }
+
+    self.__update_cluster_configuration(cluster_configuration, configuration)
+
    collector = AlertCollector()
-    ma = MetricAlert(json, json['source'])
-    ma.set_helpers(collector, {'hdfs-site/dfs.datanode.https.address': '1.2.3.4:443',
-      'hdfs-site/dfs.http.policy': 'HTTP_ONLY'})
-    ma.collect()
+    alert = MetricAlert(definition_json, definition_json['source'])
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
+    alert.collect()

    self.assertEquals('OK', collector.alerts()[0]['state'])

    # set both (http and https)
+    configuration = {'hdfs-site' :
+      { 'dfs.http.policy' : 'HTTP_ONLY',
+        'dfs.datanode.http.address' : '1.2.3.4:80',
+        'dfs.datanode.https.address' : '1.2.3.4:443' }
+    }
+
+    self.__update_cluster_configuration(cluster_configuration, configuration)
+
    collector = AlertCollector()
-    ma = MetricAlert(json, json['source'])
-    ma.set_helpers(collector, {'hdfs-site/dfs.datanode.http.address': '1.2.3.4:80',
-      'hdfs-site/dfs.datanode.https.address': '1.2.3.4:443',
-      'hdfs-site/dfs.http.policy': 'HTTP_ONLY'})
-    ma.collect()
+    alert = MetricAlert(definition_json, definition_json['source'])
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
+    alert.collect()

    self.assertEquals('OK', collector.alerts()[0]['state'])


  @patch.object(WebAlert, "_make_web_request")
  def test_web_alert(self, wa_make_web_request_mock):
-    json = {
+    definition_json = {
      "name": "webalert_test",
      "service": "HDFS",
      "component": "DATANODE",
@@ -429,9 +500,17 @@ class TestAlerts(TestCase):
    wa_make_web_request_mock.return_value = WebResponse(200,1.234,None)

    # run the alert and check HTTP 200
+    configuration = {'hdfs-site' :
+      { 'dfs.datanode.http.address' : '1.2.3.4:80' }
+    }
+
    collector = AlertCollector()
-    alert = WebAlert(json, json['source'])
-    alert.set_helpers(collector, {'hdfs-site/dfs.datanode.http.address': '1.2.3.4:80'})
+    cluster_configuration = self.__get_cluster_configuration()
+    self.__update_cluster_configuration(cluster_configuration, configuration)
+
+    alert = WebAlert(definition_json, definition_json['source'])
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
    alert.collect()

    alerts = collector.alerts()
@@ -441,10 +520,13 @@ class TestAlerts(TestCase):
    self.assertEquals('OK', alerts[0]['state'])

    # run the alert and check HTTP 500
+
+
    wa_make_web_request_mock.return_value = WebResponse(500,1.234,None)
    collector = AlertCollector()
-    alert = WebAlert(json, json['source'])
-    alert.set_helpers(collector, {'hdfs-site/dfs.datanode.http.address': '1.2.3.4:80'})
+    alert = WebAlert(definition_json, definition_json['source'])
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
    alert.collect()

    alerts = collector.alerts()
@@ -457,8 +539,9 @@ class TestAlerts(TestCase):
    wa_make_web_request_mock.return_value = WebResponse(0,0,'error message')

    collector = AlertCollector()
-    alert = WebAlert(json, json['source'])
-    alert.set_helpers(collector, {'hdfs-site/dfs.datanode.http.address': '1.2.3.4:80'})
+    alert = WebAlert(definition_json, definition_json['source'])
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
    alert.collect()

    alerts = collector.alerts()
@@ -467,13 +550,19 @@ class TestAlerts(TestCase):
    # http assertion indicating that we properly determined non-SSL
    self.assertEquals('CRITICAL', alerts[0]['state'])
    self.assertEquals('(Unit Tests) critical: http://1.2.3.4:80. error message', alerts[0]['text'])
-
+
+    configuration = {'hdfs-site' :
+      { 'dfs.http.policy' : 'HTTPS_ONLY',
+        'dfs.datanode.http.address' : '1.2.3.4:80',
+        'dfs.datanode.https.address' : '1.2.3.4:443' }
+    }
+
+    self.__update_cluster_configuration(cluster_configuration, configuration)
+
    collector = AlertCollector()
-    alert = WebAlert(json, json['source'])
-    alert.set_helpers(collector, {
-      'hdfs-site/dfs.datanode.http.address': '1.2.3.4:80',
-      'hdfs-site/dfs.datanode.https.address': '1.2.3.4:8443',
-      'hdfs-site/dfs.http.policy': 'HTTPS_ONLY'})
+    alert = WebAlert(definition_json, definition_json['source'])
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")

    alert.collect()

@@ -482,15 +571,20 @@ class TestAlerts(TestCase):

    # SSL assertion
    self.assertEquals('CRITICAL', alerts[0]['state'])
-    self.assertEquals('(Unit Tests) critical: https://1.2.3.4:8443. error message', alerts[0]['text'])
+    self.assertEquals('(Unit Tests) critical: https://1.2.3.4:443. error message', alerts[0]['text'])

  def test_reschedule(self):
    test_file_path = os.path.join('ambari_agent', 'dummy_files')
    test_stack_path = os.path.join('ambari_agent', 'dummy_files')
    test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
    test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')
-
-    ash = AlertSchedulerHandler(test_file_path, test_stack_path, test_common_services_path, test_host_scripts_path, None)
+
+    cluster_configuration = self.__get_cluster_configuration()
+
+    ash = AlertSchedulerHandler(test_file_path, test_stack_path,
+      test_common_services_path, test_host_scripts_path, cluster_configuration,
+      None)
+
    ash.start()

    self.assertEquals(1, ash.get_job_count())
@@ -499,7 +593,7 @@ class TestAlerts(TestCase):


  def test_alert_collector_purge(self):
-    json = { "name": "namenode_process",
+    definition_json = { "name": "namenode_process",
      "service": "HDFS",
      "component": "NAMENODE",
      "label": "NameNode process",
@@ -522,13 +616,20 @@ class TestAlerts(TestCase):
      }
    }

+    configuration = {'hdfs-site' :
+      { 'my-key': 'value1' }
+    }
+
    collector = AlertCollector()
+    cluster_configuration = self.__get_cluster_configuration()
+    self.__update_cluster_configuration(cluster_configuration, configuration)

-    pa = PortAlert(json, json['source'])
-    pa.set_helpers(collector, {'hdfs-site/my-key': 'value1'})
-    self.assertEquals(6, pa.interval())
+    alert = PortAlert(definition_json, definition_json['source'])
+    alert.set_helpers(collector, cluster_configuration)
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
+    self.assertEquals(6, alert.interval())

-    res = pa.collect()
+    res = alert.collect()

    alerts = collector.alerts()
    self.assertEquals(0, len(collector.alerts()))
@@ -546,12 +647,17 @@ class TestAlerts(TestCase):
    test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
    test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

-    ash = AlertSchedulerHandler(test_file_path, test_stack_path, test_common_services_path, test_host_scripts_path, None)
+    cluster_configuration = self.__get_cluster_configuration()
+
+    ash = AlertSchedulerHandler(test_file_path, test_stack_path,
+      test_common_services_path, test_host_scripts_path, cluster_configuration,
+      None)
+
    ash.start()

    self.assertEquals(1, ash.get_job_count())

-    json = { "name": "namenode_process",
+    definition_json = { "name": "namenode_process",
      "service": "HDFS",
      "component": "NAMENODE",
      "label": "NameNode process",
@@ -574,20 +680,20 @@ class TestAlerts(TestCase):
      }
    }

-    pa = PortAlert(json, json['source'])
-    ash.schedule_definition(pa)
+    alert = PortAlert(definition_json, definition_json['source'])
+    ash.schedule_definition(alert)

    self.assertEquals(2, ash.get_job_count())

-    json['enabled'] = False
-    pa = PortAlert(json, json['source'])
-    ash.schedule_definition(pa)
+    definition_json['enabled'] = False
+    alert = PortAlert(definition_json, definition_json['source'])
+    ash.schedule_definition(alert)

    # verify disabled alert not scheduled
    self.assertEquals(2, ash.get_job_count())

-    json['enabled'] = True
-    pa = PortAlert(json, json['source'])
+    definition_json['enabled'] = True
+    pa = PortAlert(definition_json, definition_json['source'])
    ash.schedule_definition(pa)

    # verify enabled alert was scheduled
@@ -599,9 +705,14 @@ class TestAlerts(TestCase):
    test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
    test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

-    ash = AlertSchedulerHandler(test_file_path, test_stack_path, test_common_services_path, test_host_scripts_path, None)
+    cluster_configuration = self.__get_cluster_configuration()
+    ash = AlertSchedulerHandler(test_file_path, test_stack_path,
+      test_common_services_path, test_host_scripts_path, cluster_configuration,
+      None)
+
    ash.start()

+
    self.assertEquals(1, ash.get_job_count())
    self.assertEquals(0, len(ash._collector.alerts()))

@@ -639,7 +750,7 @@ class TestAlerts(TestCase):


  def test_skipped_alert(self):
-    json = {
+    definition_json = {
      "name": "namenode_process",
      "service": "HDFS",
      "component": "NAMENODE",
@@ -655,27 +766,35 @@ class TestAlerts(TestCase):
    }

    # normally set by AlertSchedulerHandler
-    json['source']['stacks_directory'] = os.path.join('ambari_agent', 'dummy_files')
-    json['source']['common_services_directory'] = os.path.join('ambari_agent', 'common-services')
-    json['source']['host_scripts_directory'] = os.path.join('ambari_agent', 'host_scripts')
+    definition_json['source']['stacks_directory'] = os.path.join('ambari_agent', 'dummy_files')
+    definition_json['source']['common_services_directory'] = os.path.join('ambari_agent', 'common-services')
+    definition_json['source']['host_scripts_directory'] = os.path.join('ambari_agent', 'host_scripts')
+
+    configuration = {'foo-site' :
+      { 'skip': 'true' }
+    }

    collector = AlertCollector()
-    sa = ScriptAlert(json, json['source'], None)
+    cluster_configuration = self.__get_cluster_configuration()
+    self.__update_cluster_configuration(cluster_configuration, configuration)
+
+    alert = ScriptAlert(definition_json, definition_json['source'], None)

    # instruct the test alert script to be skipped
-    sa.set_helpers(collector, {'foo-site/skip': 'true'} )
+    alert.set_helpers(collector, cluster_configuration )
+    alert.set_cluster("c1", "c6401.ambari.apache.org")

-    self.assertEquals(json['source']['path'], sa.path)
-    self.assertEquals(json['source']['stacks_directory'], sa.stacks_dir)
-    self.assertEquals(json['source']['common_services_directory'], sa.common_services_dir)
-    self.assertEquals(json['source']['host_scripts_directory'], sa.host_scripts_dir)
+    self.assertEquals(definition_json['source']['path'], alert.path)
+    self.assertEquals(definition_json['source']['stacks_directory'], alert.stacks_dir)
+    self.assertEquals(definition_json['source']['common_services_directory'], alert.common_services_dir)
+    self.assertEquals(definition_json['source']['host_scripts_directory'], alert.host_scripts_dir)

    # ensure that it was skipped
    self.assertEquals(0,len(collector.alerts()))


  def test_default_reporting_text(self):
-    json = {
+    definition_json = {
      "name": "namenode_process",
      "service": "HDFS",
      "component": "NAMENODE",
@@ -690,100 +809,116 @@ class TestAlerts(TestCase):
      }
    }

-    alert = ScriptAlert(json, json['source'], None)
+    alert = ScriptAlert(definition_json, definition_json['source'], None)
    self.assertEquals(alert._get_reporting_text(alert.RESULT_OK), '{0}')
    self.assertEquals(alert._get_reporting_text(alert.RESULT_WARNING), '{0}')
    self.assertEquals(alert._get_reporting_text(alert.RESULT_CRITICAL), '{0}')

-    json['source']['type'] = 'PORT'
-    alert = PortAlert(json, json['source'])
+    definition_json['source']['type'] = 'PORT'
+    alert = PortAlert(definition_json, definition_json['source'])
    self.assertEquals(alert._get_reporting_text(alert.RESULT_OK), 'TCP OK - {0:.4f} response on port {1}')
    self.assertEquals(alert._get_reporting_text(alert.RESULT_WARNING), 'TCP OK - {0:.4f} response on port {1}')
    self.assertEquals(alert._get_reporting_text(alert.RESULT_CRITICAL), 'Connection failed: {0} to {1}:{2}')

-    json['source']['type'] = 'WEB'
-    alert = WebAlert(json, json['source'])
+    definition_json['source']['type'] = 'WEB'
+    alert = WebAlert(definition_json, definition_json['source'])
    self.assertEquals(alert._get_reporting_text(alert.RESULT_OK), 'HTTP {0} response in {2:.4f} seconds')
    self.assertEquals(alert._get_reporting_text(alert.RESULT_WARNING), 'HTTP {0} response in {2:.4f} seconds')
    self.assertEquals(alert._get_reporting_text(alert.RESULT_CRITICAL), 'Connection failed to {1}')

-    json['source']['type'] = 'METRIC'
-    alert = MetricAlert(json, json['source'])
+    definition_json['source']['type'] = 'METRIC'
+    alert = MetricAlert(definition_json, definition_json['source'])
    self.assertEquals(alert._get_reporting_text(alert.RESULT_OK), '{0}')
    self.assertEquals(alert._get_reporting_text(alert.RESULT_WARNING), '{0}')
    self.assertEquals(alert._get_reporting_text(alert.RESULT_CRITICAL), '{0}')
-
-  @patch("json.dump")
-  def test_update_configurations(self, json_mock):

-    def open_side_effect(file, mode):
-      if mode == 'w':
-        file_mock = MagicMock()
-        return file_mock
-      else:
-        return self.original_open(file, mode)

-    test_file_path = os.path.join('ambari_agent', 'dummy_files')
-    test_stack_path = os.path.join('ambari_agent', 'dummy_files')
-    test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
-    test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')
+  def test_configuration_updates(self):
+    definition_json = {
+      "name": "namenode_process",
+      "service": "HDFS",
+      "component": "NAMENODE",
+      "label": "NameNode process",
+      "interval": 6,
+      "scope": "host",
+      "enabled": True,
+      "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
+      "source": {
+        "type": "SCRIPT",
+        "path": "test_script.py",
+      }
+    }

-    commands = [{"clusterName": "c1",
-      "configurations": {
-        "hdfs-site": {
-          "dfs.namenode.http-address": "c6401.ambari.apache.org:50071"
-        }
-      }}]
-    with open(os.path.join(test_stack_path, "definitions.json"),"r") as fp:
-      all_commands = json.load(fp)
-    all_commands[0]['configurations']['hdfs-site'].update({"dfs.namenode.http-address": "c6401.ambari.apache.org:50071"})
-
-    ash = AlertSchedulerHandler(test_file_path, test_stack_path, test_common_services_path, test_host_scripts_path, None)
-    ash.start()
+    # normally set by AlertSchedulerHandler
+    definition_json['source']['stacks_directory'] = os.path.join('ambari_agent', 'dummy_files')
+    definition_json['source']['common_services_directory'] = os.path.join('ambari_agent', 'common-services')
+    definition_json['source']['host_scripts_directory'] = os.path.join('ambari_agent', 'host_scripts')

-    with patch("__builtin__.open") as open_mock:
-      open_mock.side_effect = open_side_effect
-      ash.update_configurations(commands)
+    configuration = {'foo-site' :
+      { 'bar': 'rendered-bar', 'baz' : 'rendered-baz' }
+    }

-    self.assertTrue(json_mock.called)
-    self.assertTrue(json_mock.called_with(all_commands))
+    # populate the configuration cache with the initial configs
+    collector = AlertCollector()
+    cluster_configuration = self.__get_cluster_configuration()
+    self.__update_cluster_configuration(cluster_configuration, configuration)

+    # run the alert and verify the output
+    alert = ScriptAlert(definition_json, definition_json['source'], MagicMock())
+    alert.set_helpers(collector, cluster_configuration )
+    alert.set_cluster("c1", "c6401.ambari.apache.org")
+    alert.collect()

-  @patch.object(AlertSchedulerHandler,"reschedule_all")
-  @patch("json.dump")
-  def test_update_configurations_without_reschedule(self, json_mock, reschedule_mock):
+    alerts = collector.alerts()
+    self.assertEquals(0, len(collector.alerts()))

-    def open_side_effect(file, mode):
-      if mode == 'w':
-        file_mock = MagicMock()
-        return file_mock
-      else:
-        return self.original_open(file, mode)
+    self.assertEquals('WARNING', alerts[0]['state'])
+    self.assertEquals('bar is rendered-bar, baz is rendered-baz', alerts[0]['text'])

-    test_file_path = os.path.join('ambari_agent', 'dummy_files')
-    test_stack_path = os.path.join('ambari_agent', 'dummy_files')
-    test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
-    test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')
+    # now update only the configs and run the same alert again and check
+    # for different output
+    configuration = {'foo-site' :
+      { 'bar': 'rendered-bar2', 'baz' : 'rendered-baz2' }
+    }

-    with open(os.path.join(test_stack_path, "definitions.json"),"r") as fp:
-      all_commands = json.load(fp)
+    # populate the configuration cache with the initial configs
+    self.__update_cluster_configuration(cluster_configuration, configuration)

-    # create a copy of the configurations from definitions.json, then add
-    # a brand new property - this should not cause a restart since there are
-    # no alerts that use this new property
-    commands = [{"clusterName": "c1" }]
-    commands[0]['configurations'] = all_commands[0]['configurations']
-    commands[0]['configurations'].update({ "foo" : "bar" })
+    alert.collect()

-    ash = AlertSchedulerHandler(test_file_path, test_stack_path,
-      test_common_services_path, test_host_scripts_path, None)
+    alerts = collector.alerts()
+    self.assertEquals(0, len(collector.alerts()))

-    ash.start()
+    self.assertEquals('WARNING', alerts[0]['state'])
+    self.assertEquals('bar is rendered-bar2, baz is rendered-baz2', alerts[0]['text'])

+
+  def __get_cluster_configuration(self):
+    """
+    Gets an instance of the cluster cache where the file read and write
+    operations have been mocked out
+    :return:
+    """
    with patch("__builtin__.open") as open_mock:
-      open_mock.side_effect = open_side_effect
-      ash.update_configurations(commands)
+      open_mock.side_effect = self.open_side_effect
+      cluster_configuration = ClusterConfiguration("")
+      return cluster_configuration
+
+
+  def __update_cluster_configuration(self, cluster_configuration, configuration):
+    """
+    Updates the configuration cache, using a mock file as the disk based
+    cache so that a file is not created during tests
+    :return:
+    """
+    with patch("__builtin__.open") as open_mock:
+      open_mock.side_effect = self.open_side_effect
+      cluster_configuration._update_configurations("c1", configuration)
+

-    self.assertTrue(json_mock.called)
-    self.assertTrue(json_mock.called_with(all_commands))
-    self.assertFalse(reschedule_mock.called)
+  def open_side_effect(self, file, mode):
+    if mode == 'w':
+      file_mock = MagicMock()
+      return file_mock
+    else:
+      return self.original_open(file, mode)