#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements.  See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from stacks.utils.RMFTestCase import *
from ambari_commons import OSCheck
from mock.mock import MagicMock, patch
from resource_management.core import shell


class TestZkfc(RMFTestCase):
    """Unit tests for the HDFS ZKFC (ZooKeeper Failover Controller) script
    ``zkfc_slave.py``.

    Each test replays the script with a canned JSON configuration through
    :class:`RMFTestCase` and asserts, in order, the exact resources the script
    is expected to create.  The start/stop resource sequences are identical
    across configurations (only the owner of ``/etc/hadoop/conf/slaves``
    differs in the secured case), so they are factored into private helper
    methods below.
    """

    COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
    STACK_VERSION = "2.0.6"

    # Shell guard shared by several resources: true iff the pid file exists
    # AND the process it names is still alive.
    _PID_GUARD = ('ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && '
                  'ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1')

    def _run_zkfc(self, command, config_file):
        """Execute zkfc_slave.py with *command* against *config_file*.

        Thin wrapper over :meth:`RMFTestCase.executeScript` so every test uses
        the same script path, classname, stack version and target.
        """
        self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                           classname = "ZkfcSlave",
                           command = command,
                           config_file = config_file,
                           hdp_stack_version = self.STACK_VERSION,
                           target = RMFTestCase.TARGET_COMMON_SERVICES
        )

    def _assert_zkfc_start_resources(self, slaves_owner):
        """Assert the full, ordered resource sequence of a ZKFC "start".

        :param slaves_owner: expected owner of ``/etc/hadoop/conf/slaves`` --
            ``'hdfs'`` for the default/bootstrap configs, ``'root'`` for the
            secured one.
        """
        # Snappy native-library shim directories and links.
        self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
                                  recursive = True,
        )
        self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
                                  recursive = True,
        )
        self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
                                  to = '/usr/lib/hadoop/lib/libsnappy.so',
        )
        self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
                                  to = '/usr/lib/hadoop/lib64/libsnappy.so',
        )
        # ulimit configuration for the hdfs user.
        self.assertResourceCalled('Directory', '/etc/security/limits.d',
                                  owner = 'root',
                                  group = 'root',
                                  recursive = True,
        )
        # NOTE: modes use 0o644/0o755 notation (identical values to the legacy
        # 0644/0755 literals; valid on Python 2.6+ and Python 3).
        self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
                                  content = Template('hdfs.conf.j2'),
                                  owner = 'root',
                                  group = 'root',
                                  mode = 0o644,
        )
        # Rendered Hadoop client configuration.
        self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                                  owner = 'hdfs',
                                  group = 'hadoop',
                                  conf_dir = '/etc/hadoop/conf',
                                  configurations = self.getConfig()['configurations']['hdfs-site'],
                                  configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
        )
        self.assertResourceCalled('XmlConfig', 'core-site.xml',
                                  owner = 'hdfs',
                                  group = 'hadoop',
                                  conf_dir = '/etc/hadoop/conf',
                                  configurations = self.getConfig()['configurations']['core-site'],
                                  configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
                                  mode = 0o644
        )
        self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
                                  content = Template('slaves.j2'),
                                  owner = slaves_owner,
        )
        self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
                                  content = StaticFile('fast-hdfs-resource.jar'),
                                  mode = 0o644,
        )
        # The script declares /var/run/hadoop twice (general setup, then
        # daemon setup), so the same Directory resource is expected twice.
        self.assertResourceCalled('Directory', '/var/run/hadoop',
                                  owner = 'hdfs',
                                  group = 'hadoop',
                                  mode = 0o755
        )
        self.assertResourceCalled('Directory', '/var/run/hadoop',
                                  owner = 'hdfs',
                                  group = 'hadoop',
                                  mode = 0o755,
        )
        self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
                                  owner = 'hdfs',
                                  recursive = True,
        )
        self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
                                  owner = 'hdfs',
                                  recursive = True,
        )
        # A stale pid file is deleted unless a live ZKFC process owns it.
        self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
                                  action = ['delete'],
                                  not_if = self._PID_GUARD,
        )
        # Daemon launch, guarded by the same liveness check.
        self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
                                  environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                                  not_if = self._PID_GUARD,
        )

    def _assert_zkfc_stop_resources(self):
        """Assert the full, ordered resource sequence of a ZKFC "stop"."""
        # Stale pid file removed first (skipped when the daemon is alive) ...
        self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
                                  action = ['delete'],
                                  not_if = self._PID_GUARD,
        )
        # ... then the daemon is stopped unconditionally ...
        self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc'",
                                  environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                                  not_if = None,
        )
        # ... and the pid file is removed again, unconditionally.
        self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
                                  action = ['delete'],
        )

    def test_start_default(self):
        self._run_zkfc("start", "ha_default.json")
        self._assert_zkfc_start_resources(slaves_owner = 'hdfs')
        self.assertNoMoreResources()

    def test_stop_default(self):
        self._run_zkfc("stop", "ha_default.json")
        self._assert_zkfc_stop_resources()
        self.assertNoMoreResources()

    def test_start_secured(self):
        self._run_zkfc("start", "ha_secured.json")
        # Secured config renders /etc/hadoop/conf/slaves owned by root.
        self._assert_zkfc_start_resources(slaves_owner = 'root')
        self.assertNoMoreResources()

    def test_stop_secured(self):
        self._run_zkfc("stop", "ha_secured.json")
        self._assert_zkfc_stop_resources()
        self.assertNoMoreResources()

    def test_start_with_ha_active_namenode_bootstrap(self):
        self._run_zkfc("start", "ha_bootstrap_active_node.json")
        # TODO: verify that the znode initialization occurs prior to ZKFC startup
        self._assert_zkfc_start_resources(slaves_owner = 'hdfs')
        self.assertNoMoreResources()

    def test_start_with_ha_standby_namenode_bootstrap(self):
        self._run_zkfc("start", "ha_bootstrap_standby_node.json")
        # TODO: verify that the znode initialization occurs prior to ZKFC startup
        self._assert_zkfc_start_resources(slaves_owner = 'hdfs')
        self.assertNoMoreResources()

    @patch("resource_management.libraries.functions.security_commons.build_expectations")
    @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
    @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
    @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
    @patch("resource_management.libraries.script.Script.put_structured_out")
    def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock,
                             validate_security_config_mock, get_params_mock, build_exp_mock):
        """security_status reports SECURED_KERBEROS only for a valid kerberized
        config; every other scenario must report UNSECURED."""
        # Happy path: kerberized config, no validation issues.
        security_params = {
            'core-site': {
                'hadoop.security.authentication': 'kerberos'
            }
        }
        props_value_check = {"hadoop.security.authentication": "kerberos",
                             "hadoop.security.authorization": "true"}
        props_empty_check = ["hadoop.security.auth_to_local"]
        props_read_check = None
        result_issues = []

        get_params_mock.return_value = security_params
        validate_security_config_mock.return_value = result_issues

        self._run_zkfc("security_status", "secured.json")

        build_exp_mock.assert_called_with('core-site', props_value_check, props_empty_check, props_read_check)
        put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
        # NOTE(review): Mock has no 'called_with' method -- this line only
        # creates an auto-child mock and asserts nothing.  Kept as-is to
        # preserve behavior; it should become assert_called_with once the
        # expected arguments are confirmed against the script's actual call.
        cached_kinit_executor_mock.called_with('/usr/bin/kinit',
                                               self.config_dict['configurations']['hadoop-env']['hdfs_user'],
                                               self.config_dict['configurations']['hadoop-env']['hdfs_user_keytab'],
                                               self.config_dict['configurations']['hadoop-env']['hdfs_user_principal_name'],
                                               self.config_dict['hostname'],
                                               '/tmp')

        # An exception thrown by cached_kinit_executor must be handled; the
        # bare "except:" was narrowed so KeyboardInterrupt/SystemExit still
        # propagate.
        cached_kinit_executor_mock.reset_mock()
        cached_kinit_executor_mock.side_effect = Exception("Invalid command")

        try:
            self._run_zkfc("security_status", "secured.json")
        except Exception:
            self.assertTrue(True)

        # hadoop.security.authentication == simple -> UNSECURED.
        security_params['core-site']['hadoop.security.authentication'] = 'simple'
        self._run_zkfc("security_status", "secured.json")
        put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
        security_params['core-site']['hadoop.security.authentication'] = 'kerberos'

        # Non-empty validation issues -> UNSECURED.
        result_issues_with_params = {
            'hdfs-site': "Something bad happened"
        }
        validate_security_config_mock.reset_mock()
        get_params_mock.reset_mock()
        validate_security_config_mock.return_value = result_issues_with_params
        get_params_mock.return_value = security_params

        self._run_zkfc("security_status", "secured.json")
        put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})

        # Empty hdfs principal/keytab (non-secured cluster config) -> UNSECURED.
        self._run_zkfc("security_status", "default.json")
        put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})