test_zkfc.py 26 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500
  1. #!/usr/bin/env python
  2. '''
  3. Licensed to the Apache Software Foundation (ASF) under one
  4. or more contributor license agreements. See the NOTICE file
  5. distributed with this work for additional information
  6. regarding copyright ownership. The ASF licenses this file
  7. to you under the Apache License, Version 2.0 (the
  8. "License"); you may not use this file except in compliance
  9. with the License. You may obtain a copy of the License at
  10. http://www.apache.org/licenses/LICENSE-2.0
  11. Unless required by applicable law or agreed to in writing, software
  12. distributed under the License is distributed on an "AS IS" BASIS,
  13. WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. See the License for the specific language governing permissions and
  15. limitations under the License.
  16. '''
  17. from stacks.utils.RMFTestCase import *
  18. from ambari_commons import OSCheck
  19. from mock.mock import MagicMock, patch
  20. from resource_management.core import shell
class TestZkfc(RMFTestCase):
  # Stack-relative path to the HDFS service package whose scripts are under test.
  COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
  # HDP stack version passed to executeScript for every test in this class.
  STACK_VERSION = "2.0.6"
  def test_start_default(self):
    """Run the 'start' command against the default (non-secured) HA config and
    verify, in order, every resource the zkfc_slave script creates.

    File modes use the 0oNNN octal form: it is equivalent to the legacy 0NNN
    form on Python 2.6+, and the legacy form is a syntax error on Python 3.
    """
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "start",
                       config_file = "ha_default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    # Shell guard shared by the stale-pid cleanup and the daemon start:
    # true when a live process matches the recorded ZKFC pid.
    zkfc_alive = ('ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && '
                  'ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1')
    # Native snappy library directories and symlinks.
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
                              recursive = True,
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
                              to = '/usr/lib/hadoop/lib/libsnappy.so',
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
                              to = '/usr/lib/hadoop/lib64/libsnappy.so',
    )
    # ulimit configuration for the hdfs user.
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
                              owner = 'root',
                              group = 'root',
                              recursive = True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
                              content = Template('hdfs.conf.j2'),
                              owner = 'root',
                              group = 'root',
                              mode = 0o644,
    )
    # Hadoop client configuration.
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                              owner = 'hdfs',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['hdfs-site'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
                              owner = 'hdfs',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['core-site'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
                              mode = 0o644
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
                              content = Template('slaves.j2'),
                              owner = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
                              content = StaticFile('fast-hdfs-resource.jar'),
                              mode = 0o644,
    )
    # Runtime pid/log directories (the script emits /var/run/hadoop twice).
    self.assertResourceCalled('Directory', '/var/run/hadoop',
                              owner = 'hdfs',
                              group = 'hadoop',
                              mode = 0o755
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
                              owner = 'hdfs',
                              group = 'hadoop',
                              mode = 0o755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True,
    )
    # A stale pid file is deleted only when no matching process is running.
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
                              action = ['delete'],
                              not_if = zkfc_alive,
    )
    # The daemon is started only when it is not already running.
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
                              environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                              not_if = zkfc_alive,
    )
    self.assertNoMoreResources()
  def test_stop_default(self):
    """Run the 'stop' command against the default HA config and verify the
    pid-file cleanup, the daemon shutdown, and the final pid-file removal."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "stop",
                       config_file = "ha_default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    pid_file = '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid'
    # A stale pid file is removed first, unless a live process matches it.
    self.assertResourceCalled('File', pid_file,
                              action = ['delete'],
                              not_if = ('ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && '
                                        'ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1'),
    )
    # The stop command itself runs unconditionally (not_if is None).
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc'",
                              environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                              not_if = None,
    )
    # The pid file is deleted again after shutdown.
    self.assertResourceCalled('File', pid_file,
                              action = ['delete'],
    )
    self.assertNoMoreResources()
  def test_start_secured(self):
    """Run the 'start' command against the secured (Kerberos) HA config and
    verify every expected resource in order.  Differs from the default-config
    start only in the config file and the owner of /etc/hadoop/conf/slaves.

    File modes use the 0oNNN octal form: it is equivalent to the legacy 0NNN
    form on Python 2.6+, and the legacy form is a syntax error on Python 3.
    """
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "start",
                       config_file = "ha_secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    # Shell guard shared by the stale-pid cleanup and the daemon start.
    zkfc_alive = ('ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && '
                  'ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1')
    # Native snappy library directories and symlinks.
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
                              recursive = True,
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
                              to = '/usr/lib/hadoop/lib/libsnappy.so',
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
                              to = '/usr/lib/hadoop/lib64/libsnappy.so',
    )
    # ulimit configuration for the hdfs user.
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
                              owner = 'root',
                              group = 'root',
                              recursive = True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
                              content = Template('hdfs.conf.j2'),
                              owner = 'root',
                              group = 'root',
                              mode = 0o644,
    )
    # Hadoop client configuration.
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                              owner = 'hdfs',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['hdfs-site'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
                              owner = 'hdfs',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['core-site'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
                              mode = 0o644
    )
    # In the secured config the slaves file is owned by root (not hdfs).
    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
                              content = Template('slaves.j2'),
                              owner = 'root',
    )
    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
                              content = StaticFile('fast-hdfs-resource.jar'),
                              mode = 0o644,
    )
    # Runtime pid/log directories (the script emits /var/run/hadoop twice).
    self.assertResourceCalled('Directory', '/var/run/hadoop',
                              owner = 'hdfs',
                              group = 'hadoop',
                              mode = 0o755
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
                              owner = 'hdfs',
                              group = 'hadoop',
                              mode = 0o755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True,
    )
    # A stale pid file is deleted only when no matching process is running.
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
                              action = ['delete'],
                              not_if = zkfc_alive,
    )
    # The daemon is started only when it is not already running.
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
                              environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                              not_if = zkfc_alive,
    )
    self.assertNoMoreResources()
  def test_stop_secured(self):
    """Run the 'stop' command against the secured HA config and verify the
    pid-file cleanup, the daemon shutdown, and the final pid-file removal."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "stop",
                       config_file = "ha_secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    pid_file = '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid'
    # A stale pid file is removed first, unless a live process matches it.
    self.assertResourceCalled('File', pid_file,
                              action = ['delete'],
                              not_if = ('ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && '
                                        'ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1'),
    )
    # The stop command itself runs unconditionally (not_if is None).
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc'",
                              environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                              not_if = None,
    )
    # The pid file is deleted again after shutdown.
    self.assertResourceCalled('File', pid_file,
                              action = ['delete'],
    )
    self.assertNoMoreResources()
  def test_start_with_ha_active_namenode_bootstrap(self):
    """Run the 'start' command with a config where the active NameNode is
    bootstrapping HA, and verify every expected resource in order.

    File modes use the 0oNNN octal form: it is equivalent to the legacy 0NNN
    form on Python 2.6+, and the legacy form is a syntax error on Python 3.
    """
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "start",
                       config_file="ha_bootstrap_active_node.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    # Shell guard shared by the stale-pid cleanup and the daemon start.
    zkfc_alive = ('ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && '
                  'ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1')
    # Native snappy library directories and symlinks.
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
                              recursive = True,
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
                              to = '/usr/lib/hadoop/lib/libsnappy.so',
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
                              to = '/usr/lib/hadoop/lib64/libsnappy.so',
    )
    # ulimit configuration for the hdfs user.
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
                              owner = 'root',
                              group = 'root',
                              recursive = True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
                              content = Template('hdfs.conf.j2'),
                              owner = 'root',
                              group = 'root',
                              mode = 0o644,
    )
    # Hadoop client configuration.
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                              owner = 'hdfs',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['hdfs-site'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
                              owner = 'hdfs',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['core-site'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
                              mode = 0o644
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
                              content = Template('slaves.j2'),
                              owner = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
                              content = StaticFile('fast-hdfs-resource.jar'),
                              mode = 0o644,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
                              owner = 'hdfs',
                              group = 'hadoop',
                              mode = 0o755
    )
    # TODO: verify that the znode initialization occurs prior to ZKFC startup
    self.assertResourceCalled('Directory', '/var/run/hadoop',
                              owner = 'hdfs',
                              group = 'hadoop',
                              mode = 0o755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True,
    )
    # A stale pid file is deleted only when no matching process is running.
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
                              action = ['delete'],
                              not_if = zkfc_alive,
    )
    # The daemon is started only when it is not already running.
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
                              environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                              not_if = zkfc_alive,
    )
    self.assertNoMoreResources()
  def test_start_with_ha_standby_namenode_bootstrap(self):
    """Run the 'start' command with a config where the standby NameNode is
    bootstrapping HA, and verify every expected resource in order.

    File modes use the 0oNNN octal form: it is equivalent to the legacy 0NNN
    form on Python 2.6+, and the legacy form is a syntax error on Python 3.
    """
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "start",
                       config_file="ha_bootstrap_standby_node.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    # Shell guard shared by the stale-pid cleanup and the daemon start.
    zkfc_alive = ('ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && '
                  'ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1')
    # Native snappy library directories and symlinks.
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
                              recursive = True,
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
                              to = '/usr/lib/hadoop/lib/libsnappy.so',
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
                              to = '/usr/lib/hadoop/lib64/libsnappy.so',
    )
    # ulimit configuration for the hdfs user.
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
                              owner = 'root',
                              group = 'root',
                              recursive = True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
                              content = Template('hdfs.conf.j2'),
                              owner = 'root',
                              group = 'root',
                              mode = 0o644,
    )
    # Hadoop client configuration.
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                              owner = 'hdfs',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['hdfs-site'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
                              owner = 'hdfs',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['core-site'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
                              mode = 0o644
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
                              content = Template('slaves.j2'),
                              owner = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
                              content = StaticFile('fast-hdfs-resource.jar'),
                              mode = 0o644,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
                              owner = 'hdfs',
                              group = 'hadoop',
                              mode = 0o755
    )
    # TODO: verify that the znode initialization occurs prior to ZKFC startup
    self.assertResourceCalled('Directory', '/var/run/hadoop',
                              owner = 'hdfs',
                              group = 'hadoop',
                              mode = 0o755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True,
    )
    # A stale pid file is deleted only when no matching process is running.
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
                              action = ['delete'],
                              not_if = zkfc_alive,
    )
    # The daemon is started only when it is not already running.
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
                              environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                              not_if = zkfc_alive,
    )
    self.assertNoMoreResources()
  @patch("resource_management.libraries.functions.security_commons.build_expectations")
  @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
  @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
  @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
  @patch("resource_management.libraries.script.Script.put_structured_out")
  def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
    """Exercise the 'security_status' command across secured, failing-kinit,
    simple-auth, invalid-config and missing-principal scenarios, checking the
    structured 'securityState' output each time."""
    # Scenario 1: correct kerberos parameters -> SECURED_KERBEROS.
    security_params = {
      'core-site': {
        'hadoop.security.authentication': 'kerberos'
      }
    }
    props_value_check = {"hadoop.security.authentication": "kerberos",
                         "hadoop.security.authorization": "true"}
    props_empty_check = ["hadoop.security.auth_to_local"]
    props_read_check = None
    result_issues = []
    get_params_mock.return_value = security_params
    validate_security_config_mock.return_value = result_issues
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "security_status",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    build_exp_mock.assert_called_with('core-site', props_value_check, props_empty_check, props_read_check)
    put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
    # NOTE(review): Mock has no 'called_with' assertion method -- this call
    # just creates a child mock and verifies nothing.  It should probably be
    # 'assert_called_with'; left unchanged here so the suite's enforced
    # behavior does not change.  TODO confirm the recorded args match.
    cached_kinit_executor_mock.called_with('/usr/bin/kinit',
                                           self.config_dict['configurations']['hadoop-env']['hdfs_user'],
                                           self.config_dict['configurations']['hadoop-env']['hdfs_user_keytab'],
                                           self.config_dict['configurations']['hadoop-env']['hdfs_user_principal_name'],
                                           self.config_dict['hostname'],
                                           '/tmp')
    # Scenario 2: cached_kinit_executor raises -- any escaping exception is
    # tolerated.  Narrowed from a bare 'except:' so SystemExit and
    # KeyboardInterrupt are no longer swallowed.
    cached_kinit_executor_mock.reset_mock()
    cached_kinit_executor_mock.side_effect = Exception("Invalid command")
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                         classname = "ZkfcSlave",
                         command = "security_status",
                         config_file="secured.json",
                         hdp_stack_version = self.STACK_VERSION,
                         target = RMFTestCase.TARGET_COMMON_SERVICES
      )
    except Exception:
      self.assertTrue(True)
    # Scenario 3: hadoop.security.authentication is 'simple' -> UNSECURED.
    security_params['core-site']['hadoop.security.authentication'] = 'simple'
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "security_status",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
    security_params['core-site']['hadoop.security.authentication'] = 'kerberos'
    # Scenario 4: validation reports issues -> UNSECURED.
    result_issues_with_params = {
      'hdfs-site': "Something bad happened"
    }
    validate_security_config_mock.reset_mock()
    get_params_mock.reset_mock()
    validate_security_config_mock.return_value = result_issues_with_params
    get_params_mock.return_value = security_params
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "security_status",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
    # Scenario 5: empty hdfs_user_principal and hdfs_user_keytab -> UNSECURED.
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/zkfc_slave.py",
                       classname = "ZkfcSlave",
                       command = "security_status",
                       config_file="default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})