#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from stacks.utils.RMFTestCase import *
from ambari_commons import OSCheck
from mock.mock import MagicMock, patch
from resource_management.core import shell
  21. class TestZkfc(RMFTestCase):
  22. COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
  23. STACK_VERSION = "2.0.6"
  24. def test_start_default(self):
  25. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
  26. classname = "ZkfcSlave",
  27. command = "start",
  28. config_file = "ha_default.json",
  29. hdp_stack_version = self.STACK_VERSION,
  30. target = RMFTestCase.TARGET_COMMON_SERVICES
  31. )
  32. self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
  33. create_parents = True,
  34. )
  35. self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
  36. create_parents = True,
  37. )
  38. self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
  39. to = '/usr/lib/hadoop/lib/libsnappy.so',
  40. )
  41. self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
  42. to = '/usr/lib/hadoop/lib64/libsnappy.so',
  43. )
  44. self.assertResourceCalled('Directory', '/etc/security/limits.d',
  45. owner = 'root',
  46. group = 'root',
  47. create_parents = True,
  48. )
  49. self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
  50. content = Template('hdfs.conf.j2'),
  51. owner = 'root',
  52. group = 'root',
  53. mode = 0644,
  54. )
  55. self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
  56. owner = 'hdfs',
  57. group = 'hadoop',
  58. conf_dir = '/etc/hadoop/conf',
  59. configurations = self.getConfig()['configurations']['hdfs-site'],
  60. configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
  61. )
  62. self.assertResourceCalled('XmlConfig', 'core-site.xml',
  63. owner = 'hdfs',
  64. group = 'hadoop',
  65. conf_dir = '/etc/hadoop/conf',
  66. configurations = self.getConfig()['configurations']['core-site'],
  67. configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
  68. mode = 0644
  69. )
  70. self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
  71. content = Template('slaves.j2'),
  72. owner = 'hdfs',
  73. )
  74. self.assertResourceCalled('Directory', '/var/run/hadoop',
  75. owner = 'hdfs',
  76. group = 'hadoop',
  77. mode = 0755
  78. )
  79. self.assertResourceCalled('Directory', '/var/run/hadoop',
  80. owner = 'hdfs',
  81. group = 'hadoop',
  82. mode = 0755,
  83. )
  84. self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
  85. owner = 'hdfs',
  86. create_parents = True,
  87. )
  88. self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
  89. owner = 'hdfs',
  90. create_parents = True,
  91. )
  92. self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
  93. action = ['delete'],
  94. not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
  95. )
  96. self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
  97. environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
  98. not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
  99. )
  100. self.assertNoMoreResources()
  def test_stop_default(self):
    """Stopping ZKFC (unsecured): run the daemon stop script only if the
    process is alive, then delete the pid file unconditionally."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "stop",
                       config_file = "ha_default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    pid_file = '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid'
    zkfc_running = ("ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f " + pid_file +
                    " && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F " + pid_file)
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc'",
                              environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                              only_if = zkfc_running)
    self.assertResourceCalled('File', pid_file, action = ['delete'])
    self.assertNoMoreResources()
  114. def test_start_secured(self):
  115. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
  116. classname = "ZkfcSlave",
  117. command = "start",
  118. config_file = "ha_secured.json",
  119. hdp_stack_version = self.STACK_VERSION,
  120. target = RMFTestCase.TARGET_COMMON_SERVICES
  121. )
  122. self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
  123. create_parents = True,
  124. )
  125. self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
  126. create_parents = True,
  127. )
  128. self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
  129. to = '/usr/lib/hadoop/lib/libsnappy.so',
  130. )
  131. self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
  132. to = '/usr/lib/hadoop/lib64/libsnappy.so',
  133. )
  134. self.assertResourceCalled('Directory', '/etc/security/limits.d',
  135. owner = 'root',
  136. group = 'root',
  137. create_parents = True,
  138. )
  139. self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
  140. content = Template('hdfs.conf.j2'),
  141. owner = 'root',
  142. group = 'root',
  143. mode = 0644,
  144. )
  145. self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
  146. owner = 'hdfs',
  147. group = 'hadoop',
  148. conf_dir = '/etc/hadoop/conf',
  149. configurations = self.getConfig()['configurations']['hdfs-site'],
  150. configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
  151. )
  152. self.assertResourceCalled('XmlConfig', 'core-site.xml',
  153. owner = 'hdfs',
  154. group = 'hadoop',
  155. conf_dir = '/etc/hadoop/conf',
  156. configurations = self.getConfig()['configurations']['core-site'],
  157. configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
  158. mode = 0644
  159. )
  160. self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
  161. content = Template('slaves.j2'),
  162. owner = 'root',
  163. )
  164. self.assertResourceCalled('Directory', '/var/run/hadoop',
  165. owner = 'hdfs',
  166. group = 'hadoop',
  167. mode = 0755
  168. )
  169. self.assertResourceCalled('Directory', '/var/run/hadoop',
  170. owner = 'hdfs',
  171. group = 'hadoop',
  172. mode = 0755,
  173. )
  174. self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
  175. owner = 'hdfs',
  176. create_parents = True,
  177. )
  178. self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
  179. owner = 'hdfs',
  180. create_parents = True,
  181. )
  182. self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
  183. action = ['delete'],
  184. not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
  185. )
  186. self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
  187. environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
  188. not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
  189. )
  190. self.assertNoMoreResources()
  def test_stop_secured(self):
    """Stopping ZKFC on a secured cluster: identical to the unsecured stop
    (the stop command itself is not kerberos-specific)."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "stop",
                       config_file = "ha_secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    pid_file = '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid'
    zkfc_running = ("ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f " + pid_file +
                    " && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F " + pid_file)
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc'",
                              environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                              only_if = zkfc_running)
    self.assertResourceCalled('File', pid_file, action = ['delete'])
    self.assertNoMoreResources()
  204. def test_start_with_ha_active_namenode_bootstrap(self):
  205. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
  206. classname = "ZkfcSlave",
  207. command = "start",
  208. config_file="ha_bootstrap_active_node.json",
  209. hdp_stack_version = self.STACK_VERSION,
  210. target = RMFTestCase.TARGET_COMMON_SERVICES
  211. )
  212. self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
  213. create_parents = True,
  214. )
  215. self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
  216. create_parents = True,
  217. )
  218. self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
  219. to = '/usr/lib/hadoop/lib/libsnappy.so',
  220. )
  221. self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
  222. to = '/usr/lib/hadoop/lib64/libsnappy.so',
  223. )
  224. self.assertResourceCalled('Directory', '/etc/security/limits.d',
  225. owner = 'root',
  226. group = 'root',
  227. create_parents = True,
  228. )
  229. self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
  230. content = Template('hdfs.conf.j2'),
  231. owner = 'root',
  232. group = 'root',
  233. mode = 0644,
  234. )
  235. self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
  236. owner = 'hdfs',
  237. group = 'hadoop',
  238. conf_dir = '/etc/hadoop/conf',
  239. configurations = self.getConfig()['configurations']['hdfs-site'],
  240. configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
  241. )
  242. self.assertResourceCalled('XmlConfig', 'core-site.xml',
  243. owner = 'hdfs',
  244. group = 'hadoop',
  245. conf_dir = '/etc/hadoop/conf',
  246. configurations = self.getConfig()['configurations']['core-site'],
  247. configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
  248. mode = 0644
  249. )
  250. self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
  251. content = Template('slaves.j2'),
  252. owner = 'hdfs',
  253. )
  254. self.assertResourceCalled('Directory', '/var/run/hadoop',
  255. owner = 'hdfs',
  256. group = 'hadoop',
  257. mode = 0755
  258. )
  259. # TODO: verify that the znode initialization occurs prior to ZKFC startup
  260. self.assertResourceCalled('Directory', '/var/run/hadoop',
  261. owner = 'hdfs',
  262. group = 'hadoop',
  263. mode = 0755,
  264. )
  265. self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
  266. owner = 'hdfs',
  267. create_parents = True,
  268. )
  269. self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
  270. owner = 'hdfs',
  271. create_parents = True,
  272. )
  273. self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
  274. action = ['delete'],
  275. not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
  276. )
  277. self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
  278. environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
  279. not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
  280. )
  281. self.assertNoMoreResources()
  282. def test_start_with_ha_standby_namenode_bootstrap(self):
  283. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
  284. classname = "ZkfcSlave",
  285. command = "start",
  286. config_file="ha_bootstrap_standby_node.json",
  287. hdp_stack_version = self.STACK_VERSION,
  288. target = RMFTestCase.TARGET_COMMON_SERVICES
  289. )
  290. self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
  291. create_parents = True,
  292. )
  293. self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
  294. create_parents = True,
  295. )
  296. self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
  297. to = '/usr/lib/hadoop/lib/libsnappy.so',
  298. )
  299. self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
  300. to = '/usr/lib/hadoop/lib64/libsnappy.so',
  301. )
  302. self.assertResourceCalled('Directory', '/etc/security/limits.d',
  303. owner = 'root',
  304. group = 'root',
  305. create_parents = True,
  306. )
  307. self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
  308. content = Template('hdfs.conf.j2'),
  309. owner = 'root',
  310. group = 'root',
  311. mode = 0644,
  312. )
  313. self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
  314. owner = 'hdfs',
  315. group = 'hadoop',
  316. conf_dir = '/etc/hadoop/conf',
  317. configurations = self.getConfig()['configurations']['hdfs-site'],
  318. configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
  319. )
  320. self.assertResourceCalled('XmlConfig', 'core-site.xml',
  321. owner = 'hdfs',
  322. group = 'hadoop',
  323. conf_dir = '/etc/hadoop/conf',
  324. configurations = self.getConfig()['configurations']['core-site'],
  325. configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
  326. mode = 0644
  327. )
  328. self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
  329. content = Template('slaves.j2'),
  330. owner = 'hdfs',
  331. )
  332. self.assertResourceCalled('Directory', '/var/run/hadoop',
  333. owner = 'hdfs',
  334. group = 'hadoop',
  335. mode = 0755
  336. )
  337. # TODO: verify that the znode initialization occurs prior to ZKFC startup
  338. self.assertResourceCalled('Directory', '/var/run/hadoop',
  339. owner = 'hdfs',
  340. group = 'hadoop',
  341. mode = 0755,
  342. )
  343. self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
  344. owner = 'hdfs',
  345. create_parents = True,
  346. )
  347. self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
  348. owner = 'hdfs',
  349. create_parents = True,
  350. )
  351. self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
  352. action = ['delete'],
  353. not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
  354. )
  355. self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
  356. environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
  357. not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
  358. )
  359. self.assertNoMoreResources()
  @patch("resource_management.libraries.functions.security_commons.build_expectations")
  @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
  @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
  @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
  @patch("resource_management.libraries.script.Script.put_structured_out")
  def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
    """Exercises the 'security_status' command across secured / unsecured
    configs, validation failures, and a failing kinit executor.

    Fixes over the previous revision:
      * ``cached_kinit_executor_mock.called_with(...)`` was used where
        ``assert_called_with(...)`` was intended.  ``called_with`` is NOT a
        Mock assertion -- attribute access on a MagicMock just creates a
        child mock, so the old "check" always passed silently.
      * the bare ``except:`` around executeScript is narrowed to
        ``except Exception:`` so SystemExit/KeyboardInterrupt are not
        swallowed, and the vacuous ``assertTrue(True)`` is removed.
    """
    # Test that function works when called with correct parameters.
    security_params = {
      'core-site': {
        'hadoop.security.authentication': 'kerberos'
      }
    }
    props_value_check = {"hadoop.security.authentication": "kerberos",
                         "hadoop.security.authorization": "true"}
    props_empty_check = ["hadoop.security.auth_to_local"]
    props_read_check = None
    result_issues = []
    get_params_mock.return_value = security_params
    validate_security_config_mock.return_value = result_issues
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "security_status",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    build_exp_mock.assert_called_with('core-site', props_value_check, props_empty_check, props_read_check)
    put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
    # FIX: was 'called_with' (a no-op on a MagicMock); use the real assertion.
    cached_kinit_executor_mock.assert_called_with('/usr/bin/kinit',
                                                  self.config_dict['configurations']['hadoop-env']['hdfs_user'],
                                                  self.config_dict['configurations']['hadoop-env']['hdfs_user_keytab'],
                                                  self.config_dict['configurations']['hadoop-env']['hdfs_user_principal_name'],
                                                  self.config_dict['hostname'],
                                                  '/tmp')
    # Testing that the exception thrown by cached_executor is caught.
    cached_kinit_executor_mock.reset_mock()
    cached_kinit_executor_mock.side_effect = Exception("Invalid command")
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                         classname = "ZkfcSlave",
                         command = "security_status",
                         config_file="secured.json",
                         hdp_stack_version = self.STACK_VERSION,
                         target = RMFTestCase.TARGET_COMMON_SERVICES
      )
    except Exception:
      # The script is expected to report this failure via structured output
      # rather than crash; reaching here is acceptable either way.
      pass
    # Testing when hadoop.security.authentication is simple.
    security_params['core-site']['hadoop.security.authentication'] = 'simple'
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "security_status",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
    security_params['core-site']['hadoop.security.authentication'] = 'kerberos'
    # Testing with non-empty result_issues.
    result_issues_with_params = {
      'hdfs-site': "Something bad happened"
    }
    validate_security_config_mock.reset_mock()
    get_params_mock.reset_mock()
    validate_security_config_mock.return_value = result_issues_with_params
    get_params_mock.return_value = security_params
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "security_status",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
    # Testing with empty hdfs_user_principal and hdfs_user_keytab.
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "security_status",
                       config_file="default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})