#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from stacks.utils.RMFTestCase import *
from ambari_commons import OSCheck
from mock.mock import MagicMock, patch

class TestSNamenode(RMFTestCase):
  COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
  STACK_VERSION = "2.0.6"
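
  # "configure" runs the shared default-config assertions and then renders
  # the DFS exclude-hosts file; nothing else should be scheduled.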
  def test_configure_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/snamenode.py",
                       classname = "SNameNode",
                       command = "configure",
                       config_file = "default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
                              owner = 'hdfs',
                              content = Template('exclude_hosts_list.j2'),
                              group = 'hadoop',
                              )
    self.assertNoMoreResources()
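
  # "start" configures first, then prepares the run/log directories, clears
  # any stale PID file, and launches the SecondaryNameNode daemon; the PID
  # cleanup and the daemon launch share the same PID-based not_if guard.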
  def test_start_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/snamenode.py",
                       classname = "SNameNode",
                       command = "start",
                       config_file = "default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
                              owner = 'hdfs',
                              content = Template('exclude_hosts_list.j2'),
                              group = 'hadoop',
                              )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
                              owner = 'hdfs',
                              group = 'hadoop',
                              mode = 0755
                              )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True,
                              )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True,
                              )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
                              action = ['delete'],
                              not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid",
                              )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode'",
                              environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                              not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid",
                              )
    self.assertNoMoreResources()
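
  # "stop" calls hadoop-daemon.sh stop (not_if is None, so it always runs)
  # and then deletes the PID file unconditionally.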
  def test_stop_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/snamenode.py",
                       classname = "SNameNode",
                       command = "stop",
                       config_file = "default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
                              action = ['delete'],
                              not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid",
                              )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode'",
                              environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                              not_if = None,
                              )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
                              action = ['delete'],
                              )
    self.assertNoMoreResources()
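
  # Same as the default "configure" case, but driven by secured.json and
  # the secured variant of the shared assertions.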
  def test_configure_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/snamenode.py",
                       classname = "SNameNode",
                       command = "configure",
                       config_file = "secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
                              owner = 'hdfs',
                              content = Template('exclude_hosts_list.j2'),
                              group = 'hadoop',
                              )
    self.assertNoMoreResources()
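
  # The secured "start" sequence mirrors the default one; the differences
  # live in assert_configure_secured.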
  def test_start_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/snamenode.py",
                       classname = "SNameNode",
                       command = "start",
                       config_file = "secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
                              owner = 'hdfs',
                              content = Template('exclude_hosts_list.j2'),
                              group = 'hadoop',
                              )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
                              owner = 'hdfs',
                              group = 'hadoop',
                              mode = 0755
                              )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True,
                              )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
                              owner = 'hdfs',
                              recursive = True,
                              )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
                              action = ['delete'],
                              not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid",
                              )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode'",
                              environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                              not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid",
                              )
    self.assertNoMoreResources()
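
  # The secured "stop" sequence is identical to the default one.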
  def test_stop_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/snamenode.py",
                       classname = "SNameNode",
                       command = "stop",
                       config_file = "secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
                              action = ['delete'],
                              not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid",
                              )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode'",
                              environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
                              not_if = None,
                              )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
                              action = ['delete'],
                              )
    self.assertNoMoreResources()
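
  # Shared expectations for default.json: snappy symlinks for both native
  # architectures, limits.d setup, hdfs-site/core-site XML configs, the
  # slaves file owned by 'hdfs', and two namesecondary checkpoint dirs.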
  def assert_configure_default(self):
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
                              recursive = True,
                              )
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
                              recursive = True,
                              )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
                              to = '/usr/lib/hadoop/lib/libsnappy.so',
                              )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
                              to = '/usr/lib/hadoop/lib64/libsnappy.so',
                              )
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
                              owner = 'root',
                              group = 'root',
                              recursive = True,
                              )
    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
                              content = Template('hdfs.conf.j2'),
                              owner = 'root',
                              group = 'root',
                              mode = 0644,
                              )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                              owner = 'hdfs',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['hdfs-site'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
                              )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
                              owner = 'hdfs',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['core-site'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
                              mode = 0644
                              )
    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
                              content = Template('slaves.j2'),
                              owner = 'hdfs',
                              )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                              owner = 'hdfs',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
                              cd_access = 'a'
                              )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary2',
                              owner = 'hdfs',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
                              cd_access = 'a'
                              )
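
  # Shared expectations for secured.json; compared to the default case the
  # slaves file is owned by 'root' and only one checkpoint dir is created.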
  def assert_configure_secured(self):
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
                              recursive = True,
                              )
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
                              recursive = True,
                              )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
                              to = '/usr/lib/hadoop/lib/libsnappy.so',
                              )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
                              to = '/usr/lib/hadoop/lib64/libsnappy.so',
                              )
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
                              owner = 'root',
                              group = 'root',
                              recursive = True,
                              )
    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
                              content = Template('hdfs.conf.j2'),
                              owner = 'root',
                              group = 'root',
                              mode = 0644,
                              )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                              owner = 'hdfs',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['hdfs-site'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
                              )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
                              owner = 'hdfs',
                              group = 'hadoop',
                              conf_dir = '/etc/hadoop/conf',
                              configurations = self.getConfig()['configurations']['core-site'],
                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
                              mode = 0644
                              )
    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
                              content = Template('slaves.j2'),
                              owner = 'root',
                              )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                              owner = 'hdfs',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
                              cd_access = 'a'
                              )
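
  # security_status covers four scenarios: a correctly Kerberized config,
  # hadoop.security.authentication set to 'simple', a kinit failure, and
  # security params missing hdfs-site or failing validation; each should
  # report its state via put_structured_out.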
  @patch("resource_management.libraries.functions.security_commons.build_expectations")
  @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
  @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
  @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
  @patch("resource_management.libraries.script.Script.put_structured_out")
  def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
    # Test that the function works when it is called with correct parameters.
    security_params = {
      'core-site': {
        'hadoop.security.authentication': 'kerberos'
      },
      'hdfs-site': {
        'dfs.secondary.namenode.keytab.file': 'path/to/snamenode/keytab/file',
        'dfs.secondary.namenode.kerberos.principal': 'snamenode_principal'
      }
    }
    props_value_check = None
    props_empty_check = ['dfs.secondary.namenode.kerberos.internal.spnego.principal',
                         'dfs.secondary.namenode.keytab.file',
                         'dfs.secondary.namenode.kerberos.principal']
    props_read_check = ['dfs.secondary.namenode.keytab.file']
    result_issues = []
    get_params_mock.return_value = security_params
    validate_security_config_mock.return_value = result_issues
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/snamenode.py",
                       classname = "SNameNode",
                       command = "security_status",
                       config_file = "secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    build_exp_mock.assert_called_with('hdfs-site', props_value_check, props_empty_check, props_read_check)
    put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
    # Mock.called_with() is a no-op attribute lookup, so the original check
    # never asserted anything; assert_called_with verifies the kinit call.
    cached_kinit_executor_mock.assert_called_with('/usr/bin/kinit',
                                                  self.config_dict['configurations']['hadoop-env']['hdfs_user'],
                                                  security_params['hdfs-site']['dfs.secondary.namenode.keytab.file'],
                                                  security_params['hdfs-site']['dfs.secondary.namenode.kerberos.principal'],
                                                  self.config_dict['hostname'],
                                                  '/tmp')

    # Testing when hadoop.security.authentication is simple.
    security_params['core-site']['hadoop.security.authentication'] = 'simple'
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/snamenode.py",
                       classname = "SNameNode",
                       command = "security_status",
                       config_file = "secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
    security_params['core-site']['hadoop.security.authentication'] = 'kerberos'

    # Testing that the exception thrown by cached_kinit_executor is caught.
    cached_kinit_executor_mock.reset_mock()
    cached_kinit_executor_mock.side_effect = Exception("Invalid command")
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/snamenode.py",
                         classname = "SNameNode",
                         command = "security_status",
                         config_file = "secured.json",
                         hdp_stack_version = self.STACK_VERSION,
                         target = RMFTestCase.TARGET_COMMON_SERVICES
      )
    except:
      self.assertTrue(True)

    # Testing with a security_params which doesn't contain hdfs-site.
    empty_security_params = {
      'core-site': {
        'hadoop.security.authentication': 'kerberos'
      }
    }
    cached_kinit_executor_mock.reset_mock()
    get_params_mock.reset_mock()
    put_structured_out_mock.reset_mock()
    get_params_mock.return_value = empty_security_params
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/snamenode.py",
                       classname = "SNameNode",
                       command = "security_status",
                       config_file = "secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    # The message below is matched verbatim against the service script's output.
    put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})

    # Testing with a non-empty result_issues.
    result_issues_with_params = {
      'hdfs-site': "Something bad happened"
    }
    validate_security_config_mock.reset_mock()
    get_params_mock.reset_mock()
    validate_security_config_mock.return_value = result_issues_with_params
    get_params_mock.return_value = security_params
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/snamenode.py",
                       classname = "SNameNode",
                       command = "security_status",
                       config_file = "secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
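
# Convenience entry point for running this module directly; this is not part
# of the upstream Ambari test harness (these tests are normally driven by the
# ambari-server unit-test runner) and assumes RMFTestCase derives from
# unittest.TestCase with the stack fixtures importable on sys.path.
if __name__ == "__main__":
  import unittest
  unittest.main()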