  1. #!/usr/bin/env python
  2. '''
  3. Licensed to the Apache Software Foundation (ASF) under one
  4. or more contributor license agreements. See the NOTICE file
  5. distributed with this work for additional information
  6. regarding copyright ownership. The ASF licenses this file
  7. to you under the Apache License, Version 2.0 (the
  8. "License"); you may not use this file except in compliance
  9. with the License. You may obtain a copy of the License at
  10. http://www.apache.org/licenses/LICENSE-2.0
  11. Unless required by applicable law or agreed to in writing, software
  12. distributed under the License is distributed on an "AS IS" BASIS,
  13. WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. See the License for the specific language governing permissions and
  15. limitations under the License.
  16. '''
  17. from stacks.utils.RMFTestCase import *
  18. from ambari_commons import OSCheck
  19. from mock.mock import MagicMock, patch
  20. class TestZkfc(RMFTestCase):
  21. COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
  22. STACK_VERSION = "2.0.6"
  23. def test_start_default(self):
  24. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
  25. classname = "ZkfcSlave",
  26. command = "start",
  27. config_file = "ha_default.json",
  28. hdp_stack_version = self.STACK_VERSION,
  29. target = RMFTestCase.TARGET_COMMON_SERVICES
  30. )
  31. self.assertResourceCalled('Directory', '/etc/security/limits.d',
  32. owner = 'root',
  33. group = 'root',
  34. recursive = True,
  35. )
  36. self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
  37. content = Template('hdfs.conf.j2'),
  38. owner = 'root',
  39. group = 'root',
  40. mode = 0644,
  41. )
  42. self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
  43. owner = 'hdfs',
  44. group = 'hadoop',
  45. conf_dir = '/etc/hadoop/conf',
  46. configurations = self.getConfig()['configurations']['hdfs-site'],
  47. configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
  48. )
  49. self.assertResourceCalled('XmlConfig', 'core-site.xml',
  50. owner = 'hdfs',
  51. group = 'hadoop',
  52. conf_dir = '/etc/hadoop/conf',
  53. configurations = self.getConfig()['configurations']['core-site'],
  54. configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
  55. mode = 0644
  56. )
  57. self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
  58. content = Template('slaves.j2'),
  59. owner = 'hdfs',
  60. )
  61. self.assertResourceCalled('Directory', '/var/run/hadoop',
  62. owner = 'hdfs',
  63. group = 'hadoop',
  64. mode = 0755
  65. )
  66. self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
  67. owner = 'hdfs',
  68. recursive = True,
  69. )
  70. self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
  71. owner = 'hdfs',
  72. recursive = True,
  73. )
  74. self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
  75. action = ['delete'],
  76. not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
  77. )
  78. self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
  79. environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
  80. not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
  81. )
  82. self.assertNoMoreResources()
  def test_stop_default(self):
    """'stop' on an unsecured cluster: the daemon is stopped unconditionally
    (not_if = None) and the pid file is removed afterwards."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "stop",
                       config_file = "ha_default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES)

    pid_file = '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid'
    zkfc_running = ('ls %s >/dev/null 2>&1 && '
                    'ps -p `cat %s` >/dev/null 2>&1') % (pid_file, pid_file)

    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True)
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True)
    # Stale pid file cleanup happens first, skipped while the daemon lives.
    self.assertResourceCalled('File', pid_file,
                              action = ['delete'],
                              not_if = zkfc_running)
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc'",
                              environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                              not_if = None)
    # After a stop the pid file is always deleted.
    self.assertResourceCalled('File', pid_file,
                              action = ['delete'])
    self.assertNoMoreResources()
  111. def test_start_secured(self):
  112. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
  113. classname = "ZkfcSlave",
  114. command = "start",
  115. config_file = "ha_secured.json",
  116. hdp_stack_version = self.STACK_VERSION,
  117. target = RMFTestCase.TARGET_COMMON_SERVICES
  118. )
  119. self.assertResourceCalled('Directory', '/etc/security/limits.d',
  120. owner = 'root',
  121. group = 'root',
  122. recursive = True,
  123. )
  124. self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
  125. content = Template('hdfs.conf.j2'),
  126. owner = 'root',
  127. group = 'root',
  128. mode = 0644,
  129. )
  130. self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
  131. owner = 'hdfs',
  132. group = 'hadoop',
  133. conf_dir = '/etc/hadoop/conf',
  134. configurations = self.getConfig()['configurations']['hdfs-site'],
  135. configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
  136. )
  137. self.assertResourceCalled('XmlConfig', 'core-site.xml',
  138. owner = 'hdfs',
  139. group = 'hadoop',
  140. conf_dir = '/etc/hadoop/conf',
  141. configurations = self.getConfig()['configurations']['core-site'],
  142. configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
  143. mode = 0644
  144. )
  145. self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
  146. content = Template('slaves.j2'),
  147. owner = 'root',
  148. )
  149. self.assertResourceCalled('Directory', '/var/run/hadoop',
  150. owner = 'hdfs',
  151. group = 'hadoop',
  152. mode = 0755
  153. )
  154. self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
  155. owner = 'hdfs',
  156. recursive = True,
  157. )
  158. self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
  159. owner = 'hdfs',
  160. recursive = True,
  161. )
  162. self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
  163. action = ['delete'],
  164. not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
  165. )
  166. self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
  167. environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
  168. not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
  169. )
  170. self.assertNoMoreResources()
  def test_stop_secured(self):
    """'stop' on a Kerberos-secured cluster: identical resource sequence to
    the unsecured stop."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "stop",
                       config_file = "ha_secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES)

    pid_file = '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid'
    zkfc_running = ('ls %s >/dev/null 2>&1 && '
                    'ps -p `cat %s` >/dev/null 2>&1') % (pid_file, pid_file)

    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True)
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True)
    self.assertResourceCalled('File', pid_file,
                              action = ['delete'],
                              not_if = zkfc_running)
    # The stop command always runs (not_if = None) ...
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc'",
                              environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                              not_if = None)
    # ... and the pid file is then removed unconditionally.
    self.assertResourceCalled('File', pid_file,
                              action = ['delete'])
    self.assertNoMoreResources()
  199. def test_start_with_ha_active_namenode_bootstrap(self):
  200. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
  201. classname = "ZkfcSlave",
  202. command = "start",
  203. config_file="ha_bootstrap_active_node.json",
  204. hdp_stack_version = self.STACK_VERSION,
  205. target = RMFTestCase.TARGET_COMMON_SERVICES
  206. )
  207. self.assertResourceCalled('Directory', '/etc/security/limits.d',
  208. owner = 'root',
  209. group = 'root',
  210. recursive = True,
  211. )
  212. self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
  213. content = Template('hdfs.conf.j2'),
  214. owner = 'root',
  215. group = 'root',
  216. mode = 0644,
  217. )
  218. self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
  219. owner = 'hdfs',
  220. group = 'hadoop',
  221. conf_dir = '/etc/hadoop/conf',
  222. configurations = self.getConfig()['configurations']['hdfs-site'],
  223. configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
  224. )
  225. self.assertResourceCalled('XmlConfig', 'core-site.xml',
  226. owner = 'hdfs',
  227. group = 'hadoop',
  228. conf_dir = '/etc/hadoop/conf',
  229. configurations = self.getConfig()['configurations']['core-site'],
  230. configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
  231. mode = 0644
  232. )
  233. self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
  234. content = Template('slaves.j2'),
  235. owner = 'hdfs',
  236. )
  237. self.assertResourceCalled('Directory', '/var/run/hadoop',
  238. owner = 'hdfs',
  239. group = 'hadoop',
  240. mode = 0755
  241. )
  242. # verify that the znode initialization occurs prior to ZKFC startup
  243. self.assertResourceCalled('Execute', 'hdfs zkfc -formatZK -force -nonInteractive',
  244. user = 'hdfs')
  245. self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
  246. owner = 'hdfs',
  247. recursive = True,
  248. )
  249. self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
  250. owner = 'hdfs',
  251. recursive = True,
  252. )
  253. self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
  254. action = ['delete'],
  255. not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
  256. )
  257. self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
  258. environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
  259. not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
  260. )
  261. self.assertNoMoreResources()
  262. def test_start_with_ha_standby_namenode_bootstrap(self):
  263. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
  264. classname = "ZkfcSlave",
  265. command = "start",
  266. config_file="ha_bootstrap_standby_node.json",
  267. hdp_stack_version = self.STACK_VERSION,
  268. target = RMFTestCase.TARGET_COMMON_SERVICES
  269. )
  270. self.assertResourceCalled('Directory', '/etc/security/limits.d',
  271. owner = 'root',
  272. group = 'root',
  273. recursive = True,
  274. )
  275. self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
  276. content = Template('hdfs.conf.j2'),
  277. owner = 'root',
  278. group = 'root',
  279. mode = 0644,
  280. )
  281. self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
  282. owner = 'hdfs',
  283. group = 'hadoop',
  284. conf_dir = '/etc/hadoop/conf',
  285. configurations = self.getConfig()['configurations']['hdfs-site'],
  286. configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
  287. )
  288. self.assertResourceCalled('XmlConfig', 'core-site.xml',
  289. owner = 'hdfs',
  290. group = 'hadoop',
  291. conf_dir = '/etc/hadoop/conf',
  292. configurations = self.getConfig()['configurations']['core-site'],
  293. configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
  294. mode = 0644
  295. )
  296. self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
  297. content = Template('slaves.j2'),
  298. owner = 'hdfs',
  299. )
  300. self.assertResourceCalled('Directory', '/var/run/hadoop',
  301. owner = 'hdfs',
  302. group = 'hadoop',
  303. mode = 0755
  304. )
  305. # verify that the znode initialization occurs prior to ZKFC startup
  306. self.assertResourceCalled('Execute', 'hdfs zkfc -formatZK -force -nonInteractive',
  307. user = 'hdfs')
  308. self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
  309. owner = 'hdfs',
  310. recursive = True,
  311. )
  312. self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
  313. owner = 'hdfs',
  314. recursive = True,
  315. )
  316. self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
  317. action = ['delete'],
  318. not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
  319. )
  320. self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
  321. environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
  322. not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
  323. )
  324. self.assertNoMoreResources()
  @patch("resource_management.libraries.functions.security_commons.build_expectations")
  @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
  @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
  @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
  @patch("resource_management.libraries.script.Script.put_structured_out")
  def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
    """Exercises the 'security_status' command and the securityState it
    reports: SECURED_KERBEROS when everything validates, UNSECURED when
    authentication is simple, when validation reports issues, or when the
    principal/keytab are missing."""
    # --- Secured cluster, valid parameters -> SECURED_KERBEROS ---
    security_params = {
      'core-site': {
        'hadoop.security.authentication': 'kerberos'
      }
    }
    props_value_check = {"hadoop.security.authentication": "kerberos",
                         "hadoop.security.authorization": "true"}
    props_empty_check = ["hadoop.security.auth_to_local"]
    props_read_check = None
    result_issues = []
    get_params_mock.return_value = security_params
    validate_security_config_mock.return_value = result_issues

    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "security_status",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES)

    build_exp_mock.assert_called_with('core-site', props_value_check, props_empty_check, props_read_check)
    put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
    # BUGFIX: the original called cached_kinit_executor_mock.called_with(...),
    # which is NOT a Mock assertion -- it merely creates a child mock and
    # always "passes" silently.  assert_called_with actually verifies the
    # expected kinit invocation.
    cached_kinit_executor_mock.assert_called_with('/usr/bin/kinit',
                                                  self.config_dict['configurations']['hadoop-env']['hdfs_user'],
                                                  self.config_dict['configurations']['hadoop-env']['hdfs_user_keytab'],
                                                  self.config_dict['configurations']['hadoop-env']['hdfs_user_principal_name'],
                                                  self.config_dict['hostname'],
                                                  '/tmp')

    # --- The exception thrown by cached_kinit_executor must be handled ---
    cached_kinit_executor_mock.reset_mock()
    cached_kinit_executor_mock.side_effect = Exception("Invalid command")
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                         classname = "ZkfcSlave",
                         command = "security_status",
                         config_file="secured.json",
                         hdp_stack_version = self.STACK_VERSION,
                         target = RMFTestCase.TARGET_COMMON_SERVICES)
    except Exception:
      # Best effort, as in the original intent: a propagated exception is
      # tolerated here.  Narrowed from a bare 'except:' so SystemExit and
      # KeyboardInterrupt are no longer swallowed.
      pass
    # reset_mock() does NOT clear side_effect; clear it explicitly so the
    # remaining scenarios are not poisoned by the injected exception.
    cached_kinit_executor_mock.side_effect = None

    # --- hadoop.security.authentication == 'simple' -> UNSECURED ---
    security_params['core-site']['hadoop.security.authentication'] = 'simple'
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "security_status",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES)
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
    security_params['core-site']['hadoop.security.authentication'] = 'kerberos'

    # --- Validation reports issues -> UNSECURED ---
    result_issues_with_params = {
      'hdfs-site': "Something bad happened"
    }
    validate_security_config_mock.reset_mock()
    get_params_mock.reset_mock()
    validate_security_config_mock.return_value = result_issues_with_params
    get_params_mock.return_value = security_params
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "security_status",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES)
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})

    # --- Empty hdfs_user_principal and hdfs_user_keytab -> UNSECURED ---
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "security_status",
                       config_file="default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES)
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})