test_hive_server.py

#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import socket
import subprocess
from mock.mock import MagicMock, patch
from resource_management.core import shell
from resource_management.libraries.functions import dynamic_variable_interpretation
from stacks.utils.RMFTestCase import *


class TestHiveServer(RMFTestCase):
  COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
  STACK_VERSION = "2.0.6"
  UPGRADE_STACK_VERSION = "2.2"

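  # "configure" against the default (non-secured) config should lay down exactly the
  # resources asserted in assert_configure_default() and nothing else.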
  def test_configure_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
        classname = "HiveServer",
        command = "configure",
        config_file="default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertNoMoreResources()

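  # "start" should configure, relocate the metastore warehouse with metatool, launch
  # HiveServer2 through the generated start script, verify JDBC connectivity, and poll
  # the HiveServer2 port (a socket must be opened and closed).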
  @patch.object(shell, "call", new=MagicMock(return_value=(0, '')))
  @patch.object(subprocess,"Popen")
  @patch("socket.socket")
  @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
  def test_start_default(self, socket_mock, popen_mock):
    s = socket_mock.return_value
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
        classname = "HiveServer",
        command = "start",
        config_file="default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertResourceCalled('Execute', 'metatool -updateLocation hdfs://c6401.ambari.apache.org:8020/apps/hive/warehouse ',
        environment = {'PATH' : "/bin:/usr/lib/hive/bin:/usr/bin"},
        user = 'hive',
    )
    self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
        not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
        environment = {'HADOOP_HOME' : '/usr'},
        path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
        user = 'hive'
    )
    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/lib/hive/lib//mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
        path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
    )
    self.assertNoMoreResources()
    self.assertTrue(socket_mock.called)
    self.assertTrue(s.close.called)

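  # "stop" should kill the HiveServer2 process from the PID file (escalating to kill -9),
  # wait for it to exit, and delete the PID file; no port check is performed.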
  @patch("socket.socket")
  @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
  def test_stop_default(self, socket_mock):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
        classname = "HiveServer",
        command = "stop",
        config_file="default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/hive/hive-server.pid`',
        not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
    )
    self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/hive/hive-server.pid`',
        not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1) || ( sleep 5 && ! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1) )',
    )
    self.assertResourceCalled('Execute', '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
        tries = 20,
        try_sleep = 3,
    )
    self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
        action = ['delete'],
    )
    self.assertNoMoreResources()
    self.assertFalse(socket_mock.called)

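  # Same as test_configure_default, but against the Kerberos-enabled config.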
  def test_configure_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
        classname = "HiveServer",
        command = "configure",
        config_file="secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertNoMoreResources()

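  # Secured "start" additionally runs kinit for the smoke user, performs a beeline smoke
  # check against the Kerberized HiveServer2 URL, and calls check_fs_root.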
  @patch("hive_service.check_fs_root")
  @patch("socket.socket")
  def test_start_secured(self, socket_mock, check_fs_root_mock):
    s = socket_mock.return_value
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
        classname = "HiveServer",
        command = "start",
        config_file="secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
        not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
        environment = {'HADOOP_HOME' : '/usr'},
        path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
        user = 'hive'
    )
    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/lib/hive/lib//mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
        path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM; ',
        user = 'ambari-qa',
    )
    self.assertResourceCalled('Execute', "! beeline -u 'jdbc:hive2://c6401.ambari.apache.org:10000/;principal=hive/_HOST@EXAMPLE.COM' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
        path = ['/bin/', '/usr/bin/', '/usr/lib/hive/bin/', '/usr/sbin/'],
        user = 'ambari-qa',
        timeout = 30,
    )
    self.assertNoMoreResources()
    self.assertTrue(check_fs_root_mock.called)

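  # Secured "stop" produces the same kill/cleanup sequence as the default stop.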
  @patch("socket.socket")
  def test_stop_secured(self, socket_mock):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
        classname = "HiveServer",
        command = "stop",
        config_file="secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('Execute', 'sudo kill `cat /var/run/hive/hive-server.pid`',
        not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
    )
    self.assertResourceCalled('Execute', 'sudo kill -9 `cat /var/run/hive/hive-server.pid`',
        not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1) || ( sleep 5 && ! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1) )',
    )
    self.assertResourceCalled('Execute', '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
        tries = 20,
        try_sleep = 3,
    )
    self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
        action = ['delete'],
    )
    self.assertNoMoreResources()
    self.assertFalse(socket_mock.called)

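  # Expected resource set for "configure" on a non-Kerberized cluster: Tez and Hive
  # warehouse HDFS directories, Hive config files, the MySQL connector and
  # DBConnectionVerification.jar, the HiveServer2 start script, and local run/log/lib
  # directories.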
  def assert_configure_default(self):
    self.assertResourceCalled('HdfsDirectory', '/apps/tez/',
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0755,
        owner = 'tez',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/apps/tez/lib/',
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0755,
        owner = 'tez',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        action = ['create'],
        bin_dir = '/usr/bin',
    )
    self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/tez*.jar',
        hadoop_bin_dir = '/usr/bin',
        hdfs_user = 'hdfs',
        owner = 'tez',
        dest_file = None,
        kinnit_if_needed = '',
        dest_dir = '/apps/tez/',
        hadoop_conf_dir = '/etc/hadoop/conf',
        mode = 0755,
    )
    self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/lib/*.jar',
        hadoop_conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        owner = 'tez',
        kinnit_if_needed = '',
        dest_dir = '/apps/tez/lib/',
        hadoop_bin_dir = '/usr/bin',
        mode = 0755,
    )
    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0777,
        owner = 'hive',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/user/hive',
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0700,
        owner = 'hive',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        action = ['create'],
        bin_dir = '/usr/bin',
    )
    self.assertResourceCalled('Directory', '/etc/hive',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/etc/hive/conf',
        owner = 'hive',
        group = 'hadoop',
        recursive = True,
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
        group = 'hadoop',
        conf_dir = '/etc/hive/conf',
        mode = 0644,
        configuration_attributes = {u'final': {u'mapred.healthChecker.script.path': u'true',
            u'mapreduce.jobtracker.staging.root.dir': u'true'}},
        owner = 'hive',
        configurations = self.getConfig()['configurations']['mapred-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
        owner = 'hive',
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
        owner = 'hive',
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
        content = 'log4jproperties\nline2',
        owner = 'hive',
        group = 'hadoop',
        mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
        content = 'log4jproperties\nline2',
        owner = 'hive',
        group = 'hadoop',
        mode = 0644,
    )
    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
        group = 'hadoop',
        conf_dir = '/etc/hive/conf.server',
        mode = 0644,
        configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
            u'javax.jdo.option.ConnectionDriverName': u'true',
            u'javax.jdo.option.ConnectionPassword': u'true'}},
        owner = 'hive',
        configurations = self.getConfig()['configurations']['hive-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
        owner = 'hive',
        group = 'hadoop',
    )
    self.assertResourceCalled('Execute', ('cp',
        '--remove-destination',
        '/usr/share/java/mysql-connector-java.jar',
        '/usr/lib/hive/lib//mysql-connector-java.jar'),
        path = ['/bin', '/usr/bin/'],
        sudo = True,
    )
    self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
        environment = {'no_proxy': u'c6401.ambari.apache.org'},
        not_if = '[ -f /usr/lib/ambari-agent/DBConnectionVerification.jar ]',
    )
    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
        content = Template('startHiveserver2.sh.j2'),
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hive',
        owner = 'hive',
        group = 'hadoop',
        mode = 0755,
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hive',
        owner = 'hive',
        group = 'hadoop',
        mode = 0755,
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/lib/hive',
        owner = 'hive',
        group = 'hadoop',
        mode = 0755,
        recursive = True,
    )

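  # Expected resource set for "configure" on a Kerberized cluster: same layout as the
  # default case, but the HdfsDirectory calls carry the hdfs headless keytab and there is
  # no Tez directory creation or tarball copy.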
  def assert_configure_secured(self):
    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
        security_enabled = True,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0777,
        owner = 'hive',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/user/hive',
        security_enabled = True,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0700,
        owner = 'hive',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
        security_enabled = True,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        action = ['create'],
        bin_dir = '/usr/bin',
    )
    self.assertResourceCalled('Directory', '/etc/hive',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/etc/hive/conf',
        owner = 'hive',
        group = 'hadoop',
        recursive = True,
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
        group = 'hadoop',
        conf_dir = '/etc/hive/conf',
        mode = 0644,
        configuration_attributes = {u'final': {u'mapred.healthChecker.script.path': u'true',
            u'mapreduce.jobtracker.staging.root.dir': u'true'}},
        owner = 'hive',
        configurations = self.getConfig()['configurations']['mapred-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
        owner = 'hive',
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
        owner = 'hive',
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
        content = 'log4jproperties\nline2',
        owner = 'hive',
        group = 'hadoop',
        mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
        content = 'log4jproperties\nline2',
        owner = 'hive',
        group = 'hadoop',
        mode = 0644,
    )
    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
        group = 'hadoop',
        conf_dir = '/etc/hive/conf.server',
        mode = 0644,
        configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
            u'javax.jdo.option.ConnectionDriverName': u'true',
            u'javax.jdo.option.ConnectionPassword': u'true'}},
        owner = 'hive',
        configurations = self.getConfig()['configurations']['hive-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
        owner = 'hive',
        group = 'hadoop',
    )
    self.assertResourceCalled('Execute', ('cp',
        '--remove-destination',
        '/usr/share/java/mysql-connector-java.jar',
        '/usr/lib/hive/lib//mysql-connector-java.jar'),
        path = ['/bin', '/usr/bin/'],
        sudo = True,
    )
    self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
        environment = {'no_proxy': u'c6401.ambari.apache.org'},
        not_if = '[ -f /usr/lib/ambari-agent/DBConnectionVerification.jar ]',
    )
    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
        content = Template('startHiveserver2.sh.j2'),
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hive',
        owner = 'hive',
        group = 'hadoop',
        mode = 0755,
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hive',
        owner = 'hive',
        group = 'hadoop',
        mode = 0755,
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/lib/hive',
        owner = 'hive',
        group = 'hadoop',
        mode = 0755,
        recursive = True,
    )

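  # When the HiveServer2 socket check cannot connect (socket.error, with time.time mocked
  # to exhaust the wait window), the start command is expected to fail.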
  @patch("hive_service.check_fs_root")
  @patch("time.time")
  @patch("socket.socket")
  def test_socket_timeout(self, socket_mock, time_mock, check_fs_root_mock):
    s = socket_mock.return_value
    s.connect = MagicMock()
    s.connect.side_effect = socket.error("")
    time_mock.side_effect = [0, 1000, 2000, 3000, 4000]
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
          classname = "HiveServer",
          command = "start",
          config_file="default.json",
          hdp_stack_version = self.STACK_VERSION,
          target = RMFTestCase.TARGET_COMMON_SERVICES
      )
      self.fail("Script failure due to socket error was expected")
    except:
      self.assert_configure_default()
      self.assertFalse(socket_mock.called)
      self.assertFalse(s.close.called)

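  # During a rolling-upgrade restart, stop should deregister the currently running
  # HiveServer2 version (reported by the mocked shell.call) and then switch the
  # hdp-select pointer to the target version.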
  @patch("hive_server.HiveServer.pre_rolling_restart")
  @patch("hive_server.HiveServer.start")
  @patch.object(shell, "call", new=MagicMock(return_value=(0,"hive-server2 - 2.2.0.0-2041")))
  def test_stop_during_upgrade(self, hive_server_start_mock, hive_server_pre_rolling_mock):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
        classname = "HiveServer", command = "restart", config_file = "hive-upgrade.json",
        hdp_stack_version = self.UPGRADE_STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES )
    self.assertResourceCalled('Execute', 'hive --service hiveserver2 --deregister 2.2.0.0-2041',
        path=['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
        tries=1, user='hive')
    self.assertResourceCalled('Execute', 'hdp-select set hive-server2 2.2.1.0-2065',)

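  # An unparsable version string from the mocked shell.call should make the upgrade
  # restart fail without executing any resources.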
  @patch("hive_server.HiveServer.pre_rolling_restart")
  @patch("hive_server.HiveServer.start")
  @patch.object(shell, "call", new=MagicMock(return_value=(0,"BAD VERSION")))
  def test_stop_during_upgrade_bad_hive_version(self, hive_server_start_mock, hive_server_pre_rolling_mock):
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
          classname = "HiveServer", command = "restart", config_file = "hive-upgrade.json",
          hdp_stack_version = self.UPGRADE_STACK_VERSION,
          target = RMFTestCase.TARGET_COMMON_SERVICES )
      self.fail("Invalid hive version should have caused an exception")
    except:
      pass
    self.assertNoMoreResources()

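  # security_status should report SECURED_KERBEROS when hive-site carries the expected
  # Kerberos properties and kinit succeeds, and UNSECURED (or an issue message) when
  # validation fails, the params are empty, or security is disabled.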
  @patch("resource_management.libraries.functions.security_commons.build_expectations")
  @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
  @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
  @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
  @patch("resource_management.libraries.script.Script.put_structured_out")
  def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
    # Test that the function works when called with correct parameters
    import status_params
    security_params = {
      'hive-site': {
        "hive.server2.authentication": "KERBEROS",
        "hive.metastore.sasl.enabled": "true",
        "hive.security.authorization.enabled": "true",
        "hive.server2.authentication.kerberos.keytab": "path/to/keytab",
        "hive.server2.authentication.kerberos.principal": "principal",
        "hive.server2.authentication.spnego.keytab": "path/to/spnego_keytab",
        "hive.server2.authentication.spnego.principal": "spnego_principal"
      }
    }
    result_issues = []
    props_value_check = {"hive.server2.authentication": "KERBEROS",
        "hive.metastore.sasl.enabled": "true",
        "hive.security.authorization.enabled": "true"}
    props_empty_check = ["hive.server2.authentication.kerberos.keytab",
        "hive.server2.authentication.kerberos.principal",
        "hive.server2.authentication.spnego.principal",
        "hive.server2.authentication.spnego.keytab"]
    props_read_check = ["hive.server2.authentication.kerberos.keytab",
        "hive.server2.authentication.spnego.keytab"]
    get_params_mock.return_value = security_params
    validate_security_config_mock.return_value = result_issues

    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
        classname = "HiveServer",
        command = "security_status",
        config_file="../../2.1/configs/secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )

    get_params_mock.assert_called_with(status_params.hive_conf_dir, {'hive-site.xml': "XML"})
    build_exp_mock.assert_called_with('hive-site', props_value_check, props_empty_check, props_read_check)
    put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
    self.assertTrue(cached_kinit_executor_mock.call_count, 2)
    cached_kinit_executor_mock.assert_called_with(status_params.kinit_path_local,
        status_params.hive_user,
        security_params['hive-site']['hive.server2.authentication.spnego.keytab'],
        security_params['hive-site']['hive.server2.authentication.spnego.principal'],
        status_params.hostname,
        status_params.tmp_dir)

    # Testing that the exception thrown by cached_executor is caught
    cached_kinit_executor_mock.reset_mock()
    cached_kinit_executor_mock.side_effect = Exception("Invalid command")
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
          classname = "HiveServer",
          command = "security_status",
          config_file="../../2.1/configs/secured.json",
          hdp_stack_version = self.STACK_VERSION,
          target = RMFTestCase.TARGET_COMMON_SERVICES
      )
    except:
      self.assertTrue(True)

    # Testing with a security_params which doesn't contain startup
    empty_security_params = {}
    cached_kinit_executor_mock.reset_mock()
    get_params_mock.reset_mock()
    put_structured_out_mock.reset_mock()
    get_params_mock.return_value = empty_security_params

    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
        classname = "HiveServer",
        command = "security_status",
        config_file="../../2.1/configs/secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})

    # Testing with non-empty result_issues
    result_issues_with_params = {}
    result_issues_with_params['hive-site']="Something bad happened"
    validate_security_config_mock.reset_mock()
    get_params_mock.reset_mock()
    validate_security_config_mock.return_value = result_issues_with_params
    get_params_mock.return_value = security_params

    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
        classname = "HiveServer",
        command = "security_status",
        config_file="../../2.1/configs/secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})

    # Testing with security_enabled = false
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
        classname = "HiveServer",
        command = "security_status",
        config_file="../../2.1/configs/default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})