#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import socket
import subprocess

from mock.mock import MagicMock, patch

from resource_management.core import shell
from resource_management.libraries.functions import dynamic_variable_interpretation
from stacks.utils.RMFTestCase import *

class TestHiveServer(RMFTestCase):
  COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
  STACK_VERSION = "2.0.6"
  UPGRADE_STACK_VERSION = "2.2"

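  # "configure" should lay out the HDFS directories, /etc/hive config files and
  # local run/log directories without launching HiveServer2; the expected
  # resource sequence is checked in assert_configure_default().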
  def test_configure_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "configure",
                       config_file="default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertNoMoreResources()

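  # "start" first performs the full configure sequence, then updates the metastore
  # warehouse location via the Hive metatool, launches HiveServer2 through the
  # generated /tmp/start_hiveserver2_script (guarded by a pid-file check), and
  # verifies JDBC connectivity with DBConnectionVerification. socket.socket is
  # patched so the port probe and socket close can be asserted on the mock.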
  @patch.object(shell, "call", new=MagicMock(return_value=(0, '')))
  @patch.object(subprocess, "Popen")
  @patch("socket.socket")
  @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
  def test_start_default(self, socket_mock, popen_mock):
    s = socket_mock.return_value
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "start",
                       config_file="default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.printResources()
    self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020/apps/hive/warehouse ',
                              environment = {'PATH' : "/bin:/usr/lib/hive/bin:/usr/bin"},
                              user = 'hive',
    )
    self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                              not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
                              environment = {'HADOOP_HOME' : '/usr'},
                              path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                              user = 'hive'
    )
    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/lib/hive/lib//mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
    )
    self.assertNoMoreResources()
    self.assertTrue(socket_mock.called)
    self.assertTrue(s.close.called)

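  # "stop" sends a plain kill to the process from the pid file, escalates to
  # kill -9 after a 5-second grace period, polls until the process is gone, and
  # finally deletes the pid file; no socket probe is expected on stop.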
  @patch("socket.socket")
  @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
  def test_stop_default(self, socket_mock):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "stop",
                       config_file="default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/hive/hive-server.pid`',
                              not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
    )
    self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/hive/hive-server.pid`',
                              not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1) || ( sleep 5 && ! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1) )',
    )
    self.assertResourceCalled('Execute', '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
                              tries = 20,
                              try_sleep = 3,
    )
    self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
                              action = ['delete'],
    )
    self.assertNoMoreResources()
    self.assertFalse(socket_mock.called)

  def test_configure_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "configure",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertNoMoreResources()

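  # The secured "start" additionally runs kinit with the hive service keytab
  # before launching HiveServer2, and smoke-tests the server by connecting with
  # Beeline as ambari-qa after a kinit with the smoke-user keytab.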
  @patch("hive_service.check_fs_root")
  @patch("socket.socket")
  def test_start_secured(self, socket_mock, check_fs_root_mock):
    s = socket_mock.return_value
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "start",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hive.service.keytab hive/c6402.ambari.apache.org@EXAMPLE.COM; ',
                              user = 'hive',
    )
    self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                              not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
                              environment = {'HADOOP_HOME' : '/usr'},
                              path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                              user = 'hive'
    )
    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/lib/hive/lib//mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM; ',
                              user = 'ambari-qa',
    )
    self.assertResourceCalled('Execute', "! beeline -u 'jdbc:hive2://c6401.ambari.apache.org:10000/;principal=hive/_HOST@EXAMPLE.COM' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
                              path = ['/bin/', '/usr/bin/', '/usr/lib/hive/bin/', '/usr/sbin/'],
                              user = 'ambari-qa',
                              timeout = 30,
    )
    self.assertNoMoreResources()
    self.assertTrue(check_fs_root_mock.called)

  @patch("socket.socket")
  def test_stop_secured(self, socket_mock):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "stop",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/hive/hive-server.pid`',
                              not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
    )
    self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/hive/hive-server.pid`',
                              not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1) || ( sleep 5 && ! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1) )',
    )
    self.assertResourceCalled('Execute', '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
                              tries = 20,
                              try_sleep = 3,
    )
    self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
                              action = ['delete'],
    )
    self.assertNoMoreResources()
    self.assertFalse(socket_mock.called)

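  # Expected resource sequence for "configure" with the default (non-Kerberos)
  # config: Tez and Hive warehouse directories in HDFS, Tez tarball copies,
  # /etc/hive config files, the MySQL connector and DBConnectionVerification
  # jars, the HiveServer2 start script, and the local run/log/lib directories.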
  def assert_configure_default(self):
    self.assertResourceCalled('HdfsDirectory', '/apps/tez/',
                              security_enabled = False,
                              keytab = UnknownConfigurationMock(),
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              kinit_path_local = '/usr/bin/kinit',
                              mode = 0755,
                              owner = 'tez',
                              bin_dir = '/usr/bin',
                              action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/apps/tez/lib/',
                              security_enabled = False,
                              keytab = UnknownConfigurationMock(),
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              kinit_path_local = '/usr/bin/kinit',
                              mode = 0755,
                              owner = 'tez',
                              bin_dir = '/usr/bin',
                              action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
                              security_enabled = False,
                              keytab = UnknownConfigurationMock(),
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              kinit_path_local = '/usr/bin/kinit',
                              action = ['create'],
                              bin_dir = '/usr/bin',
    )
    self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/tez*.jar',
                              hadoop_bin_dir = '/usr/bin',
                              hdfs_user = 'hdfs',
                              owner = 'tez',
                              dest_file = None,
                              kinnit_if_needed = '',
                              dest_dir = '/apps/tez/',
                              hadoop_conf_dir = '/etc/hadoop/conf',
                              mode = 0755,
    )
    self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/lib/*.jar',
                              hadoop_conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              owner = 'tez',
                              kinnit_if_needed = '',
                              dest_dir = '/apps/tez/lib/',
                              hadoop_bin_dir = '/usr/bin',
                              mode = 0755,
    )
    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
                              security_enabled = False,
                              keytab = UnknownConfigurationMock(),
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              kinit_path_local = '/usr/bin/kinit',
                              mode = 0777,
                              owner = 'hive',
                              bin_dir = '/usr/bin',
                              action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/user/hive',
                              security_enabled = False,
                              keytab = UnknownConfigurationMock(),
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              kinit_path_local = '/usr/bin/kinit',
                              mode = 0700,
                              owner = 'hive',
                              bin_dir = '/usr/bin',
                              action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
                              security_enabled = False,
                              keytab = UnknownConfigurationMock(),
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              kinit_path_local = '/usr/bin/kinit',
                              action = ['create'],
                              bin_dir = '/usr/bin',
    )
    self.assertResourceCalled('Directory', '/etc/hive',
                              mode = 0755,
    )
    self.assertResourceCalled('Directory', '/etc/hive/conf',
                              owner = 'hive',
                              group = 'hadoop',
                              recursive = True,
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                              group = 'hadoop',
                              conf_dir = '/etc/hive/conf',
                              mode = 0644,
                              configuration_attributes = {u'final': {u'mapred.healthChecker.script.path': u'true',
                                                                     u'mapreduce.jobtracker.staging.root.dir': u'true'}},
                              owner = 'hive',
                              configurations = self.getConfig()['configurations']['mapred-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
                              content = 'log4jproperties\nline2',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
                              content = 'log4jproperties\nline2',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0644,
    )
    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                              group = 'hadoop',
                              conf_dir = '/etc/hive/conf.server',
                              mode = 0644,
                              configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                     u'javax.jdo.option.ConnectionDriverName': u'true',
                                                                     u'javax.jdo.option.ConnectionPassword': u'true'}},
                              owner = 'hive',
                              configurations = self.getConfig()['configurations']['hive-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
                              content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('Execute', ('cp',
                                          '--remove-destination',
                                          '/usr/share/java/mysql-connector-java.jar',
                                          '/usr/lib/hive/lib//mysql-connector-java.jar'),
                              path = ['/bin', '/usr/bin/'],
                              sudo = True,
    )
    self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
                              content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
    )
    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
                              content = Template('startHiveserver2.sh.j2'),
                              mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hive',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hive',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/lib/hive',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
    )

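  # Secured variant of the sequence above: the HDFS operations use the hdfs
  # headless keytab with security_enabled=True, and the Tez directory/tarball
  # steps are not expected.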
  def assert_configure_secured(self):
    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
                              security_enabled = True,
                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              kinit_path_local = '/usr/bin/kinit',
                              mode = 0777,
                              owner = 'hive',
                              bin_dir = '/usr/bin',
                              action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/user/hive',
                              security_enabled = True,
                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              kinit_path_local = '/usr/bin/kinit',
                              mode = 0700,
                              owner = 'hive',
                              bin_dir = '/usr/bin',
                              action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
                              security_enabled = True,
                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              kinit_path_local = '/usr/bin/kinit',
                              action = ['create'],
                              bin_dir = '/usr/bin',
    )
    self.assertResourceCalled('Directory', '/etc/hive',
                              mode = 0755,
    )
    self.assertResourceCalled('Directory', '/etc/hive/conf',
                              owner = 'hive',
                              group = 'hadoop',
                              recursive = True,
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                              group = 'hadoop',
                              conf_dir = '/etc/hive/conf',
                              mode = 0644,
                              configuration_attributes = {u'final': {u'mapred.healthChecker.script.path': u'true',
                                                                     u'mapreduce.jobtracker.staging.root.dir': u'true'}},
                              owner = 'hive',
                              configurations = self.getConfig()['configurations']['mapred-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
                              content = 'log4jproperties\nline2',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
                              content = 'log4jproperties\nline2',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0644,
    )
    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                              group = 'hadoop',
                              conf_dir = '/etc/hive/conf.server',
                              mode = 0644,
                              configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                     u'javax.jdo.option.ConnectionDriverName': u'true',
                                                                     u'javax.jdo.option.ConnectionPassword': u'true'}},
                              owner = 'hive',
                              configurations = self.getConfig()['configurations']['hive-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
                              content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('Execute', ('cp',
                                          '--remove-destination',
                                          '/usr/share/java/mysql-connector-java.jar',
                                          '/usr/lib/hive/lib//mysql-connector-java.jar'),
                              path = ['/bin', '/usr/bin/'],
                              sudo = True,
    )
    self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
                              content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
    )
    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
                              content = Template('startHiveserver2.sh.j2'),
                              mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hive',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hive',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/lib/hive',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
    )

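  # If the HiveServer2 port never becomes reachable, "start" is expected to fail:
  # socket.connect is made to raise socket.error and time.time is advanced past
  # the wait timeout.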
  @patch("hive_service.check_fs_root")
  @patch("time.time")
  @patch("socket.socket")
  def test_socket_timeout(self, socket_mock, time_mock, check_fs_root_mock):
    s = socket_mock.return_value
    s.connect = MagicMock()
    s.connect.side_effect = socket.error("")

    time_mock.side_effect = [0, 1000, 2000, 3000, 4000]

    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                         classname = "HiveServer",
                         command = "start",
                         config_file="default.json",
                         hdp_stack_version = self.STACK_VERSION,
                         target = RMFTestCase.TARGET_COMMON_SERVICES
      )
      self.fail("Script failure due to socket error was expected")
    except:
      self.assert_configure_default()
      self.assertFalse(socket_mock.called)
      self.assertFalse(s.close.called)

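  # During a rolling-upgrade restart, stop should deregister the running
  # HiveServer2 instance using the version reported by the patched shell.call,
  # then repoint hdp-select at the new version.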
  @patch("hive_server.HiveServer.pre_rolling_restart")
  @patch("hive_server.HiveServer.start")
  @patch.object(shell, "call", new=MagicMock(return_value=(0, "hive-server2 - 2.2.0.0-2041")))
  def test_stop_during_upgrade(self, hive_server_start_mock,
                               hive_server_pre_rolling_mock):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer", command = "restart", config_file = "hive-upgrade.json",
                       hdp_stack_version = self.UPGRADE_STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES)

    self.assertResourceCalled('Execute', 'hive --service hiveserver2 --deregister 2.2.0.0-2041',
                              path=['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
                              tries=1, user='hive')

    self.assertResourceCalled('Execute', 'hdp-select set hive-server2 2.2.1.0-2065',)

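  # If shell.call reports a version string that cannot be parsed, the restart
  # should raise and no resources should have been scheduled.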
  @patch("hive_server.HiveServer.pre_rolling_restart")
  @patch("hive_server.HiveServer.start")
  @patch.object(shell, "call", new=MagicMock(return_value=(0, "BAD VERSION")))
  def test_stop_during_upgrade_bad_hive_version(self, hive_server_start_mock,
                                                hive_server_pre_rolling_mock):
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                         classname = "HiveServer", command = "restart", config_file = "hive-upgrade.json",
                         hdp_stack_version = self.UPGRADE_STACK_VERSION,
                         target = RMFTestCase.TARGET_COMMON_SERVICES)
      self.fail("Invalid hive version should have caused an exception")
    except:
      pass

    self.assertNoMoreResources()

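  # security_status should report SECURED_KERBEROS when the Kerberos properties
  # validate and kinit succeeds, survive a kinit failure, and report an issue or
  # UNSECURED when the parameters are missing, invalid, or security is disabled.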
  @patch("resource_management.libraries.functions.security_commons.build_expectations")
  @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
  @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
  @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
  @patch("resource_management.libraries.script.Script.put_structured_out")
  def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
    # Test that the function works when it is called with correct parameters
    import status_params

    security_params = {
      'hive-site': {
        "hive.server2.authentication": "KERBEROS",
        "hive.metastore.sasl.enabled": "true",
        "hive.security.authorization.enabled": "true",
        "hive.server2.authentication.kerberos.keytab": "path/to/keytab",
        "hive.server2.authentication.kerberos.principal": "principal",
        "hive.server2.authentication.spnego.keytab": "path/to/spnego_keytab",
        "hive.server2.authentication.spnego.principal": "spnego_principal"
      }
    }
    result_issues = []
    props_value_check = {"hive.server2.authentication": "KERBEROS",
                         "hive.metastore.sasl.enabled": "true",
                         "hive.security.authorization.enabled": "true"}
    props_empty_check = ["hive.server2.authentication.kerberos.keytab",
                         "hive.server2.authentication.kerberos.principal",
                         "hive.server2.authentication.spnego.principal",
                         "hive.server2.authentication.spnego.keytab"]
    props_read_check = ["hive.server2.authentication.kerberos.keytab",
                        "hive.server2.authentication.spnego.keytab"]

    get_params_mock.return_value = security_params
    validate_security_config_mock.return_value = result_issues

    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "security_status",
                       config_file="../../2.1/configs/secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )

    get_params_mock.assert_called_with(status_params.hive_conf_dir, {'hive-site.xml': "XML"})
    build_exp_mock.assert_called_with('hive-site', props_value_check, props_empty_check, props_read_check)
    put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
    self.assertEqual(cached_kinit_executor_mock.call_count, 2)
    cached_kinit_executor_mock.assert_called_with(status_params.kinit_path_local,
                                                  status_params.hive_user,
                                                  security_params['hive-site']['hive.server2.authentication.spnego.keytab'],
                                                  security_params['hive-site']['hive.server2.authentication.spnego.principal'],
                                                  status_params.hostname,
                                                  status_params.tmp_dir)

    # Testing that the exception thrown by cached_executor is caught
    cached_kinit_executor_mock.reset_mock()
    cached_kinit_executor_mock.side_effect = Exception("Invalid command")

    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                         classname = "HiveServer",
                         command = "security_status",
                         config_file="../../2.1/configs/secured.json",
                         hdp_stack_version = self.STACK_VERSION,
                         target = RMFTestCase.TARGET_COMMON_SERVICES
      )
    except:
      self.assertTrue(True)

    # Testing with a security_params which doesn't contain startup
    empty_security_params = {}
    cached_kinit_executor_mock.reset_mock()
    get_params_mock.reset_mock()
    put_structured_out_mock.reset_mock()
    get_params_mock.return_value = empty_security_params

    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "security_status",
                       config_file="../../2.1/configs/secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})

    # Testing with non-empty result_issues
    result_issues_with_params = {}
    result_issues_with_params['hive-site'] = "Something bad happened"

    validate_security_config_mock.reset_mock()
    get_params_mock.reset_mock()
    validate_security_config_mock.return_value = result_issues_with_params
    get_params_mock.return_value = security_params

    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "security_status",
                       config_file="../../2.1/configs/secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})

    # Testing with security_enabled = false
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "security_status",
                       config_file="../../2.1/configs/default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})