  1. #!/usr/bin/env python
  2. '''
  3. Licensed to the Apache Software Foundation (ASF) under one
  4. or more contributor license agreements. See the NOTICE file
  5. distributed with this work for additional information
  6. regarding copyright ownership. The ASF licenses this file
  7. to you under the Apache License, Version 2.0 (the
  8. "License"); you may not use this file except in compliance
  9. with the License. You may obtain a copy of the License at
  10. http://www.apache.org/licenses/LICENSE-2.0
  11. Unless required by applicable law or agreed to in writing, software
  12. distributed under the License is distributed on an "AS IS" BASIS,
  13. WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. See the License for the specific language governing permissions and
  15. limitations under the License.
  16. '''
  17. import os
  18. import json
  19. from stacks.utils.RMFTestCase import *
  20. from mock.mock import MagicMock, patch
  21. from only_for_platform import not_for_platform, PLATFORM_WINDOWS
  22. # NFS GATEWAY is always started by root using jsvc due to rpcbind bugs
  23. # on Linux such as CentOS6.2. https://bugzilla.redhat.com/show_bug.cgi?id=731542
  24. @not_for_platform(PLATFORM_WINDOWS)
  25. class TestNFSGateway(RMFTestCase):
  26. COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
  27. STACK_VERSION = "2.0.6"
  28. UPGRADE_STACK_VERSION = "2.2"
  29. def test_configure_default(self):
  30. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
  31. classname = "NFSGateway",
  32. command = "configure",
  33. config_file = "default.json",
  34. hdp_stack_version = self.STACK_VERSION,
  35. target = RMFTestCase.TARGET_COMMON_SERVICES
  36. )
  37. self.assert_configure_default()
  38. self.assertNoMoreResources()
  39. @patch("hdfs_nfsgateway.prepare_rpcbind")
  40. def test_start_default(self, prepare_rpcbind_mock):
  41. prepare_rpcbind_mock.returnvalue = 0
  42. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
  43. classname = "NFSGateway",
  44. command = "start",
  45. config_file = "default.json",
  46. hdp_stack_version = self.STACK_VERSION,
  47. target = RMFTestCase.TARGET_COMMON_SERVICES
  48. )
  49. self.assert_configure_default()
  50. self.assertResourceCalled('Directory', '/var/run/hadoop',
  51. owner = 'root',
  52. group = 'root',
  53. mode = 0755
  54. )
  55. self.assertResourceCalled('Directory', '/var/run/hadoop/root',
  56. owner = 'root',
  57. recursive = True,
  58. )
  59. self.assertResourceCalled('Directory', '/var/log/hadoop/root',
  60. owner = 'root',
  61. group = 'hadoop',
  62. mode = 0775
  63. )
  64. self.assertResourceCalled('File', '/var/run/hadoop/root/hadoop_privileged_nfs3.pid',
  65. action = ['delete'],
  66. not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/root/hadoop_privileged_nfs3.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/root/hadoop_privileged_nfs3.pid",
  67. )
  68. self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start nfs3',
  69. environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec',
  70. 'HADOOP_PRIVILEGED_NFS_LOG_DIR': u'/var/log/hadoop/root',
  71. 'HADOOP_PRIVILEGED_NFS_PID_DIR': u'/var/run/hadoop/root',
  72. 'HADOOP_PRIVILEGED_NFS_USER': u'hdfs'},
  73. not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/root/hadoop_privileged_nfs3.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/root/hadoop_privileged_nfs3.pid",
  74. )
  75. self.assertNoMoreResources()
  76. def test_stop_default(self):
  77. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
  78. classname = "NFSGateway",
  79. command = "stop",
  80. config_file = "default.json",
  81. hdp_stack_version = self.STACK_VERSION,
  82. target = RMFTestCase.TARGET_COMMON_SERVICES
  83. )
  84. self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop nfs3',
  85. environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec',
  86. 'HADOOP_PRIVILEGED_NFS_LOG_DIR': u'/var/log/hadoop/root',
  87. 'HADOOP_PRIVILEGED_NFS_PID_DIR': u'/var/run/hadoop/root',
  88. 'HADOOP_PRIVILEGED_NFS_USER': u'hdfs'},
  89. only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/root/hadoop_privileged_nfs3.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/root/hadoop_privileged_nfs3.pid")
  90. self.assertResourceCalled('File', '/var/run/hadoop/root/hadoop_privileged_nfs3.pid', action = ['delete'])
  91. self.assertNoMoreResources()
  92. def test_configure_secured(self):
  93. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
  94. classname = "NFSGateway",
  95. command = "configure",
  96. config_file = "secured.json",
  97. hdp_stack_version = self.STACK_VERSION,
  98. target = RMFTestCase.TARGET_COMMON_SERVICES
  99. )
  100. self.assert_configure_secured()
  101. self.assertNoMoreResources()
  102. @patch("hdfs_nfsgateway.prepare_rpcbind")
  103. def test_start_secured(self, prepare_rpcbind_mock):
  104. prepare_rpcbind_mock.returnvalue = 0
  105. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
  106. classname = "NFSGateway",
  107. command = "start",
  108. config_file = "secured.json",
  109. hdp_stack_version = self.STACK_VERSION,
  110. target = RMFTestCase.TARGET_COMMON_SERVICES
  111. )
  112. self.assert_configure_secured()
  113. self.assertResourceCalled('Directory', '/var/run/hadoop',
  114. owner = 'root',
  115. group = 'root',
  116. mode = 0755
  117. )
  118. self.assertResourceCalled('Directory', '/var/run/hadoop/root',
  119. owner = 'root',
  120. recursive = True,
  121. )
  122. self.assertResourceCalled('Directory', '/var/log/hadoop/root',
  123. owner = 'root',
  124. group = 'hadoop',
  125. mode = 0775
  126. )
  127. self.assertResourceCalled('File', '/var/run/hadoop/root/hadoop_privileged_nfs3.pid',
  128. action = ['delete'],
  129. not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/root/hadoop_privileged_nfs3.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/root/hadoop_privileged_nfs3.pid",
  130. )
  131. self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start nfs3',
  132. environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec',
  133. 'HADOOP_PRIVILEGED_NFS_LOG_DIR': u'/var/log/hadoop/root',
  134. 'HADOOP_PRIVILEGED_NFS_PID_DIR': u'/var/run/hadoop/root',
  135. 'HADOOP_PRIVILEGED_NFS_USER': u'hdfs'},
  136. not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/root/hadoop_privileged_nfs3.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/root/hadoop_privileged_nfs3.pid",
  137. )
  138. self.assertNoMoreResources()
  139. def test_stop_secured(self):
  140. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
  141. classname = "NFSGateway",
  142. command = "stop",
  143. config_file = "secured.json",
  144. hdp_stack_version = self.STACK_VERSION,
  145. target = RMFTestCase.TARGET_COMMON_SERVICES
  146. )
  147. self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop nfs3',
  148. environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec',
  149. 'HADOOP_PRIVILEGED_NFS_LOG_DIR': u'/var/log/hadoop/root',
  150. 'HADOOP_PRIVILEGED_NFS_PID_DIR': u'/var/run/hadoop/root',
  151. 'HADOOP_PRIVILEGED_NFS_USER': u'hdfs'},
  152. only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/root/hadoop_privileged_nfs3.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/root/hadoop_privileged_nfs3.pid")
  153. self.assertResourceCalled('File', '/var/run/hadoop/root/hadoop_privileged_nfs3.pid', action = ['delete'])
  154. self.assertNoMoreResources()
  155. def assert_configure_default(self):
  156. self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
  157. recursive = True,
  158. )
  159. self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
  160. recursive = True,
  161. )
  162. self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
  163. to = '/usr/lib/hadoop/lib/libsnappy.so',
  164. )
  165. self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
  166. to = '/usr/lib/hadoop/lib64/libsnappy.so',
  167. )
  168. self.assertResourceCalled('Directory', '/etc/security/limits.d',
  169. owner = 'root',
  170. group = 'root',
  171. recursive = True,
  172. )
  173. self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
  174. content = Template('hdfs.conf.j2'),
  175. owner = 'root',
  176. group = 'root',
  177. mode = 0644,
  178. )
  179. self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
  180. owner = 'hdfs',
  181. group = 'hadoop',
  182. conf_dir = '/etc/hadoop/conf',
  183. configurations = self.getConfig()['configurations']['hdfs-site'],
  184. configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
  185. )
  186. self.assertResourceCalled('XmlConfig', 'core-site.xml',
  187. owner = 'hdfs',
  188. group = 'hadoop',
  189. conf_dir = '/etc/hadoop/conf',
  190. configurations = self.getConfig()['configurations']['core-site'],
  191. configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
  192. mode = 0644
  193. )
  194. self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
  195. content = Template('slaves.j2'),
  196. owner = 'hdfs',
  197. )
  198. def assert_configure_secured(self):
  199. self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
  200. recursive = True,
  201. )
  202. self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
  203. recursive = True,
  204. )
  205. self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
  206. to = '/usr/lib/hadoop/lib/libsnappy.so',
  207. )
  208. self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
  209. to = '/usr/lib/hadoop/lib64/libsnappy.so',
  210. )
  211. self.assertResourceCalled('Directory', '/etc/security/limits.d',
  212. owner = 'root',
  213. group = 'root',
  214. recursive = True,
  215. )
  216. self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
  217. content = Template('hdfs.conf.j2'),
  218. owner = 'root',
  219. group = 'root',
  220. mode = 0644,
  221. )
  222. self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
  223. owner = 'hdfs',
  224. group = 'hadoop',
  225. conf_dir = '/etc/hadoop/conf',
  226. configurations = self.getConfig()['configurations']['hdfs-site'],
  227. configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
  228. )
  229. self.assertResourceCalled('XmlConfig', 'core-site.xml',
  230. owner = 'hdfs',
  231. group = 'hadoop',
  232. conf_dir = '/etc/hadoop/conf',
  233. configurations = self.getConfig()['configurations']['core-site'],
  234. configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
  235. mode = 0644
  236. )
  237. self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
  238. content = Template('slaves.j2'),
  239. owner = 'root',
  240. )
  241. @patch("resource_management.libraries.functions.security_commons.build_expectations")
  242. @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
  243. @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
  244. @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
  245. @patch("resource_management.libraries.script.Script.put_structured_out")
  246. def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
  247. # Test that function works when is called with correct parameters
  248. security_params = {
  249. 'core-site': {
  250. 'hadoop.security.authentication': 'kerberos'
  251. },
  252. 'hdfs-site': {
  253. 'nfs.keytab.file': 'path/to/nfsgateway/keytab/file',
  254. 'nfs.kerberos.principal': 'nfs_principal'
  255. }
  256. }
  257. props_value_check = None
  258. props_empty_check = ['nfs.keytab.file',
  259. 'nfs.kerberos.principal']
  260. props_read_check = ['nfs.keytab.file']
  261. result_issues = []
  262. get_params_mock.return_value = security_params
  263. validate_security_config_mock.return_value = result_issues
  264. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
  265. classname = "NFSGateway",
  266. command = "security_status",
  267. config_file="secured.json",
  268. hdp_stack_version = self.STACK_VERSION,
  269. target = RMFTestCase.TARGET_COMMON_SERVICES
  270. )
  271. build_exp_mock.assert_called_with('hdfs-site', props_value_check, props_empty_check, props_read_check)
  272. put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
  273. cached_kinit_executor_mock.called_with('/usr/bin/kinit',
  274. self.config_dict['configurations']['hadoop-env']['hdfs_user'],
  275. security_params['hdfs-site']['nfs.keytab.file'],
  276. security_params['hdfs-site']['nfs.kerberos.principal'],
  277. self.config_dict['hostname'],
  278. '/tmp')
  279. # Testing when hadoop.security.authentication is simple
  280. security_params['core-site']['hadoop.security.authentication'] = 'simple'
  281. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
  282. classname = "NFSGateway",
  283. command = "security_status",
  284. config_file="secured.json",
  285. hdp_stack_version = self.STACK_VERSION,
  286. target = RMFTestCase.TARGET_COMMON_SERVICES
  287. )
  288. put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
  289. security_params['core-site']['hadoop.security.authentication'] = 'kerberos'
  290. # Testing that the exception throw by cached_executor is caught
  291. cached_kinit_executor_mock.reset_mock()
  292. cached_kinit_executor_mock.side_effect = Exception("Invalid command")
  293. try:
  294. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
  295. classname = "NFSGateway",
  296. command = "security_status",
  297. config_file="secured.json",
  298. hdp_stack_version = self.STACK_VERSION,
  299. target = RMFTestCase.TARGET_COMMON_SERVICES
  300. )
  301. except:
  302. self.assertTrue(True)
  303. # Testing with a security_params which doesn't contains hdfs-site
  304. empty_security_params = {
  305. 'core-site': {
  306. 'hadoop.security.authentication': 'kerberos'
  307. }
  308. }
  309. cached_kinit_executor_mock.reset_mock()
  310. get_params_mock.reset_mock()
  311. put_structured_out_mock.reset_mock()
  312. get_params_mock.return_value = empty_security_params
  313. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
  314. classname = "NFSGateway",
  315. command = "security_status",
  316. config_file="secured.json",
  317. hdp_stack_version = self.STACK_VERSION,
  318. target = RMFTestCase.TARGET_COMMON_SERVICES
  319. )
  320. put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})
  321. # Testing with not empty result_issues
  322. result_issues_with_params = {
  323. 'hdfs-site': "Something bad happened"
  324. }
  325. validate_security_config_mock.reset_mock()
  326. get_params_mock.reset_mock()
  327. validate_security_config_mock.return_value = result_issues_with_params
  328. get_params_mock.return_value = security_params
  329. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
  330. classname = "NFSGateway",
  331. command = "security_status",
  332. config_file="secured.json",
  333. hdp_stack_version = self.STACK_VERSION,
  334. target = RMFTestCase.TARGET_COMMON_SERVICES
  335. )
  336. put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
  337. self.assertNoMoreResources()
  338. @patch("resource_management.core.shell.call")
  339. def test_pre_upgrade_restart(self, call_mock):
  340. call_mock.side_effects = [(0, None), (0, None)]
  341. config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
  342. with open(config_file, "r") as f:
  343. json_content = json.load(f)
  344. version = '2.3.1.0-3242'
  345. json_content['commandParams']['version'] = version
  346. stack_version = '2.3'
  347. json_content['hostLevelParams']['stack_version'] = stack_version
  348. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nfsgateway.py",
  349. classname = "NFSGateway",
  350. command = "pre_upgrade_restart",
  351. config_dict = json_content,
  352. hdp_stack_version = self.STACK_VERSION,
  353. target = RMFTestCase.TARGET_COMMON_SERVICES,
  354. call_mocks = [(0, None), (0, None), (0, None), (0, None)])
  355. self.assertResourceCalled('Execute',
  356. ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-nfs3', version), sudo=True,)
  357. self.assertNoMoreResources()