# test_hive_server.py
#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import json
import socket
import subprocess
from mock.mock import MagicMock, patch
from resource_management.core import shell
from resource_management.libraries.functions import dynamic_variable_interpretation
from stacks.utils.RMFTestCase import *
  24. @patch("resource_management.libraries.functions.check_thrift_port_sasl", new=MagicMock())
  25. class TestHiveServer(RMFTestCase):
  26. COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
  27. STACK_VERSION = "2.0.6"
  28. UPGRADE_STACK_VERSION = "2.2"
  def test_configure_default(self):
    """CONFIGURE on an unsecured cluster applies exactly the default resources."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname="HiveServer",
                       command="configure",
                       config_file="default.json",
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    self.assert_configure_default()
    self.assertNoMoreResources()
  @patch("socket.socket")
  @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
  def test_start_default(self, socket_mock):
    """START on an unsecured cluster.

    Expects configure resources, a metastore location update, the HS2
    launch script guarded by a pid check, and a JDBC connectivity probe.
    """
    s = socket_mock.return_value  # port-check socket created by the script
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname="HiveServer",
                       command="start",
                       config_file="default.json",
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    self.assert_configure_default()
    # Metastore locations are rewritten to point at the cluster NameNode.
    self.assertResourceCalled('Execute',
        'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
        environment={'PATH': '/bin:/usr/lib/hive/bin:/usr/bin'},
        user='hive')
    # HiveServer2 is launched only when no live pid already exists.
    self.assertResourceCalled('Execute',
        '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
        environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
        not_if='ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
        user='hive',
        path=['/bin:/usr/lib/hive/bin:/usr/bin'])
    # Metastore DB connectivity is verified with retries.
    self.assertResourceCalled('Execute',
        '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/lib/hive/lib//mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
        path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
        tries=5,
        try_sleep=10)
    self.assertNoMoreResources()
  @patch.object(dynamic_variable_interpretation, "_get_tar_source_and_dest_folder")
  @patch("socket.socket")
  def test_start_default_no_copy(self, socket_mock, get_tar_mock):
    """START without tarball install: the tarball helper must never be invoked.

    Fix: the mock's return_value is configured BEFORE executeScript runs;
    the original assigned it afterwards, where it could not possibly affect
    the command under test.
    """
    s = socket_mock.return_value
    get_tar_mock.return_value = ("a", "b")
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname="HiveServer",
                       command="start",
                       config_file="default_no_install.json",
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    self.assert_configure_default()
    self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
        environment={'PATH': '/bin:/usr/lib/hive/bin:/usr/bin'},
        user='hive',
    )
    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
        not_if='ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
        environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': '/usr/jdk64/jdk1.7.0_45'},
        path=["/bin:/usr/lib/hive/bin:/usr/bin"],
        user='hive'
    )
    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/lib/hive/lib//mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
        path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
    )
    self.assertNoMoreResources()
    # The port-check socket must have been opened and closed, and the
    # tarball-copy helper must not have been touched at all.
    self.assertTrue(socket_mock.called)
    self.assertTrue(s.close.called)
    self.assertFalse(get_tar_mock.called)
  @patch("socket.socket")
  @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
  def test_stop_default(self, socket_mock):
    """STOP on an unsecured cluster: kill, escalate to kill -9, wait, remove pid."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname="HiveServer",
                       command="stop",
                       config_file="default.json",
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    # Graceful kill, skipped when the process is already gone.
    self.assertResourceCalled('Execute', 'ambari-sudo.sh kill `cat /var/run/hive/hive-server.pid`',
        not_if='! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
    )
    # Forced kill, skipped when the graceful kill (after a grace period) worked.
    self.assertResourceCalled('Execute', 'ambari-sudo.sh kill -9 `cat /var/run/hive/hive-server.pid`',
        not_if='! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1) || ( sleep 5 && ! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1) )',
    )
    # Poll until the process has actually exited.
    self.assertResourceCalled('Execute', '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
        tries=20,
        try_sleep=3,
    )
    self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
        action=['delete'],
    )
    self.assertNoMoreResources()
  def test_configure_secured(self):
    """CONFIGURE on a Kerberized cluster applies exactly the secured resources."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname="HiveServer",
                       command="configure",
                       config_file="secured.json",
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    self.assert_configure_secured()
    self.assertNoMoreResources()
  @patch("hive_service.check_fs_root")
  @patch("socket.socket")
  def test_start_secured(self, socket_mock, check_fs_root_mock):
    """START on a Kerberized cluster.

    Expects kinit as the hive user, the HS2 launch, the JDBC probe, a
    smoke-user kinit, and a beeline connectivity smoke test.
    """
    s = socket_mock.return_value
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname="HiveServer",
                       command="start",
                       config_file="secured.json",
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    self.assert_configure_secured()
    # Service principal ticket is obtained before starting HS2.
    self.assertResourceCalled('Execute',
        '/usr/bin/kinit -kt /etc/security/keytabs/hive.service.keytab hive/c6401.ambari.apache.org@EXAMPLE.COM; ',
        user='hive',
    )
    self.assertResourceCalled('Execute',
        '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
        environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
        not_if='ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
        user='hive',
        path=['/bin:/usr/lib/hive/bin:/usr/bin'],
    )
    self.assertResourceCalled('Execute',
        '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/lib/hive/lib//mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
        path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
        tries=5,
        try_sleep=10,
    )
    # Smoke user authenticates and probes the HS2 endpoint via beeline.
    self.assertResourceCalled('Execute',
        '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM; ',
        user='ambari-qa',
    )
    self.assertResourceCalled('Execute',
        "! beeline -u 'jdbc:hive2://c6401.ambari.apache.org:10000/;principal=hive/_HOST@EXAMPLE.COM' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'",
        path=['/bin/', '/usr/bin/', '/usr/lib/hive/bin/', '/usr/sbin/'],
        user='ambari-qa',
        timeout=30,
    )
    self.assertNoMoreResources()
    # The filesystem-root sanity check must have run.
    self.assertTrue(check_fs_root_mock.called)
  @patch("socket.socket")
  def test_stop_secured(self, socket_mock):
    """STOP on a Kerberized cluster mirrors the unsecured stop sequence."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname="HiveServer",
                       command="stop",
                       config_file="secured.json",
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute', 'ambari-sudo.sh kill `cat /var/run/hive/hive-server.pid`',
        not_if='! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
    )
    self.assertResourceCalled('Execute', 'ambari-sudo.sh kill -9 `cat /var/run/hive/hive-server.pid`',
        not_if='! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1) || ( sleep 5 && ! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1) )',
    )
    self.assertResourceCalled('Execute', '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
        tries=20,
        try_sleep=3,
    )
    self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
        action=['delete'],
    )
    self.assertNoMoreResources()
  def assert_configure_default(self):
    """Assert the full resource sequence produced by CONFIGURE on an
    unsecured cluster: HDFS directories, conf files, the MySQL connector
    copy, helper jars/scripts, and local runtime directories.

    Intended to be called by START/CONFIGURE tests before their own
    command-specific assertions.
    """
    # HDFS directories are queued ('create_delayed') and flushed by the
    # final HdfsDirectory(None, action=['create']) call.
    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
                              security_enabled=False,
                              keytab=UnknownConfigurationMock(),
                              conf_dir='/etc/hadoop/conf',
                              hdfs_user='hdfs',
                              kinit_path_local='/usr/bin/kinit',
                              mode=0777,
                              owner='hive',
                              bin_dir='/usr/bin',
                              action=['create_delayed'],
                              )
    self.assertResourceCalled('HdfsDirectory', '/user/hive',
                              security_enabled=False,
                              keytab=UnknownConfigurationMock(),
                              conf_dir='/etc/hadoop/conf',
                              hdfs_user='hdfs',
                              kinit_path_local='/usr/bin/kinit',
                              mode=0700,
                              owner='hive',
                              bin_dir='/usr/bin',
                              action=['create_delayed'],
                              )
    self.assertResourceCalled('HdfsDirectory', '/custompath/tmp/hive',
                              security_enabled=False,
                              keytab=UnknownConfigurationMock(),
                              conf_dir='/etc/hadoop/conf',
                              hdfs_user='hdfs',
                              kinit_path_local='/usr/bin/kinit',
                              mode=0777,
                              owner='hive',
                              group='hdfs',
                              action=['create_delayed'],
                              bin_dir='/usr/bin',
                              )
    self.assertResourceCalled('HdfsDirectory', None,
                              security_enabled=False,
                              keytab=UnknownConfigurationMock(),
                              conf_dir='/etc/hadoop/conf',
                              hdfs_user='hdfs',
                              kinit_path_local='/usr/bin/kinit',
                              action=['create'],
                              bin_dir='/usr/bin',
                              )
    # Local configuration directories and files.
    self.assertResourceCalled('Directory', '/etc/hive',
                              mode=0755,
                              )
    self.assertResourceCalled('Directory', '/etc/hive/conf',
                              owner='hive',
                              group='hadoop',
                              recursive=True,
                              )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                              group='hadoop',
                              conf_dir='/etc/hive/conf',
                              mode=0644,
                              configuration_attributes={u'final': {u'mapred.healthChecker.script.path': u'true',
                                                                   u'mapreduce.jobtracker.staging.root.dir': u'true'}},
                              owner='hive',
                              configurations=self.getConfig()['configurations']['mapred-site'],
                              )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
                              owner='hive',
                              group='hadoop',
                              )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
                              owner='hive',
                              group='hadoop',
                              )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
                              content='log4jproperties\nline2',
                              owner='hive',
                              group='hadoop',
                              mode=0644,
                              )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
                              content='log4jproperties\nline2',
                              owner='hive',
                              group='hadoop',
                              mode=0644,
                              )
    # Server-side configuration lives under conf.server.
    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                              group='hadoop',
                              conf_dir='/etc/hive/conf.server',
                              mode=0644,
                              configuration_attributes={u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                   u'javax.jdo.option.ConnectionDriverName': u'true',
                                                                   u'javax.jdo.option.ConnectionPassword': u'true'}},
                              owner='hive',
                              configurations=self.getConfig()['configurations']['hive-site'],
                              )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
                              content=InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                              owner='hive',
                              group='hadoop',
                              )
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
                              owner='root',
                              group='root',
                              recursive=True,
                              )
    self.assertResourceCalled('File', '/etc/security/limits.d/hive.conf',
                              content=Template('hive.conf.j2'),
                              owner='root',
                              group='root',
                              mode=0644,
                              )
    # MySQL JDBC driver is copied into the Hive lib dir with sudo.
    self.assertResourceCalled('Execute', ('cp',
                                          '--remove-destination',
                                          '/usr/share/java/mysql-connector-java.jar',
                                          '/usr/lib/hive/lib//mysql-connector-java.jar'),
                              path=['/bin', '/usr/bin/'],
                              sudo=True,
                              )
    self.assertResourceCalled('File', '/usr/lib/hive/lib//mysql-connector-java.jar',
                              mode=0644,
                              )
    self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
                              content=DownloadSource('http://c6401.ambari.apache.org:8080/resources'
                                                     '/DBConnectionVerification.jar'),
                              )
    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
                              content=Template('startHiveserver2.sh.j2'),
                              mode=0755,
                              )
    # Local runtime directories (pid, log, lib).
    self.assertResourceCalled('Directory', '/var/run/hive',
                              owner='hive',
                              mode=0755,
                              group='hadoop',
                              recursive=True,
                              cd_access='a',
                              )
    self.assertResourceCalled('Directory', '/var/log/hive',
                              owner='hive',
                              mode=0755,
                              group='hadoop',
                              recursive=True,
                              cd_access='a',
                              )
    self.assertResourceCalled('Directory', '/var/lib/hive',
                              owner='hive',
                              mode=0755,
                              group='hadoop',
                              recursive=True,
                              cd_access='a',
                              )
  def assert_configure_secured(self):
    """Assert the full resource sequence produced by CONFIGURE on a
    Kerberized cluster.

    Mirrors assert_configure_default, with security_enabled=True and a
    real HDFS headless keytab instead of an UnknownConfigurationMock.
    """
    # HDFS directories are queued ('create_delayed') and flushed by the
    # final HdfsDirectory(None, action=['create']) call.
    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
                              security_enabled=True,
                              keytab='/etc/security/keytabs/hdfs.headless.keytab',
                              conf_dir='/etc/hadoop/conf',
                              hdfs_user='hdfs',
                              kinit_path_local='/usr/bin/kinit',
                              mode=0777,
                              owner='hive',
                              bin_dir='/usr/bin',
                              action=['create_delayed'],
                              )
    self.assertResourceCalled('HdfsDirectory', '/user/hive',
                              security_enabled=True,
                              keytab='/etc/security/keytabs/hdfs.headless.keytab',
                              conf_dir='/etc/hadoop/conf',
                              hdfs_user='hdfs',
                              kinit_path_local='/usr/bin/kinit',
                              mode=0700,
                              owner='hive',
                              bin_dir='/usr/bin',
                              action=['create_delayed'],
                              )
    self.assertResourceCalled('HdfsDirectory', '/custompath/tmp/hive',
                              security_enabled=True,
                              keytab='/etc/security/keytabs/hdfs.headless.keytab',
                              conf_dir='/etc/hadoop/conf',
                              hdfs_user='hdfs',
                              kinit_path_local='/usr/bin/kinit',
                              mode=0777,
                              owner='hive',
                              group='hdfs',
                              action=['create_delayed'],
                              bin_dir='/usr/bin',
                              )
    self.assertResourceCalled('HdfsDirectory', None,
                              security_enabled=True,
                              keytab='/etc/security/keytabs/hdfs.headless.keytab',
                              conf_dir='/etc/hadoop/conf',
                              hdfs_user='hdfs',
                              kinit_path_local='/usr/bin/kinit',
                              action=['create'],
                              bin_dir='/usr/bin',
                              )
    # Local configuration directories and files.
    self.assertResourceCalled('Directory', '/etc/hive',
                              mode=0755,
                              )
    self.assertResourceCalled('Directory', '/etc/hive/conf',
                              owner='hive',
                              group='hadoop',
                              recursive=True,
                              )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                              group='hadoop',
                              conf_dir='/etc/hive/conf',
                              mode=0644,
                              configuration_attributes={u'final': {u'mapred.healthChecker.script.path': u'true',
                                                                   u'mapreduce.jobtracker.staging.root.dir': u'true'}},
                              owner='hive',
                              configurations=self.getConfig()['configurations']['mapred-site'],
                              )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
                              owner='hive',
                              group='hadoop',
                              )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
                              owner='hive',
                              group='hadoop',
                              )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
                              content='log4jproperties\nline2',
                              owner='hive',
                              group='hadoop',
                              mode=0644,
                              )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
                              content='log4jproperties\nline2',
                              owner='hive',
                              group='hadoop',
                              mode=0644,
                              )
    # Server-side configuration lives under conf.server.
    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                              group='hadoop',
                              conf_dir='/etc/hive/conf.server',
                              mode=0644,
                              configuration_attributes={u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                   u'javax.jdo.option.ConnectionDriverName': u'true',
                                                                   u'javax.jdo.option.ConnectionPassword': u'true'}},
                              owner='hive',
                              configurations=self.getConfig()['configurations']['hive-site'],
                              )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
                              content=InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                              owner='hive',
                              group='hadoop',
                              )
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
                              owner='root',
                              group='root',
                              recursive=True,
                              )
    self.assertResourceCalled('File', '/etc/security/limits.d/hive.conf',
                              content=Template('hive.conf.j2'),
                              owner='root',
                              group='root',
                              mode=0644,
                              )
    # MySQL JDBC driver is copied into the Hive lib dir with sudo.
    self.assertResourceCalled('Execute', ('cp',
                                          '--remove-destination',
                                          '/usr/share/java/mysql-connector-java.jar',
                                          '/usr/lib/hive/lib//mysql-connector-java.jar'),
                              path=['/bin', '/usr/bin/'],
                              sudo=True,
                              )
    self.assertResourceCalled('File', '/usr/lib/hive/lib//mysql-connector-java.jar',
                              mode=0644,
                              )
    self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
                              content=DownloadSource(
                                  'http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
                              )
    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
                              content=Template('startHiveserver2.sh.j2'),
                              mode=0755,
                              )
    # Local runtime directories (pid, log, lib).
    self.assertResourceCalled('Directory', '/var/run/hive',
                              owner='hive',
                              group='hadoop',
                              mode=0755,
                              recursive=True,
                              cd_access='a',
                              )
    self.assertResourceCalled('Directory', '/var/log/hive',
                              owner='hive',
                              group='hadoop',
                              mode=0755,
                              recursive=True,
                              cd_access='a',
                              )
    self.assertResourceCalled('Directory', '/var/lib/hive',
                              owner='hive',
                              group='hadoop',
                              mode=0755,
                              recursive=True,
                              cd_access='a',
                              )
  @patch("hive_service.check_fs_root")
  @patch("time.time")
  @patch("socket.socket")
  def test_socket_timeout(self, socket_mock, time_mock, check_fs_root_mock):
    """START must fail when the HS2 port check cannot connect.

    Fix: the original put self.fail() inside the try with a bare except,
    so the AssertionError raised by fail() was itself swallowed and the
    test could pass even when executeScript succeeded. try/except/else
    keeps the failure path distinct from the expected-exception path.
    """
    s = socket_mock.return_value
    s.connect = MagicMock()
    # Every connect attempt fails; time marches forward past any timeout.
    s.connect.side_effect = socket.error("")
    time_mock.side_effect = [0, 1000, 2000, 3000, 4000]
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                         classname="HiveServer",
                         command="start",
                         config_file="default.json",
                         hdp_stack_version=self.STACK_VERSION,
                         target=RMFTestCase.TARGET_COMMON_SERVICES)
    except:
      # The script failed as expected; configuration must still have run.
      self.assert_configure_default()
    else:
      self.fail("Script failure due to socket error was expected")
  @patch("hive_server.HiveServer.pre_rolling_restart")
  @patch("hive_server.HiveServer.start")
  def test_stop_during_upgrade(self, hive_server_start_mock, hive_server_pre_rolling_mock):
    """RESTART during a rolling upgrade deregisters HS2 then switches the stack version."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname="HiveServer",
                       command="restart",
                       config_file="hive-upgrade.json",
                       hdp_stack_version=self.UPGRADE_STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES,
                       # Both shell calls report a valid hive-server2 version.
                       call_mocks=[(0, "hive-server2 - 2.2.0.0-2041"), (0, "hive-server2 - 2.2.0.0-2041")])
    self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service hiveserver2 --deregister 2.2.0.0-2041',
                              path=['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
                              tries=1,
                              user='hive')
    self.assertResourceCalled('Execute', 'hdp-select set hive-server2 2.2.1.0-2065',)
  @patch("hive_server.HiveServer.pre_rolling_restart")
  @patch("hive_server.HiveServer.start")
  def test_stop_during_upgrade_bad_hive_version(self, hive_server_start_mock, hive_server_pre_rolling_mock):
    """An unparseable hive-server2 version must abort the upgrade restart.

    Fix: the original placed self.fail() inside the try with 'except: pass',
    which silently swallowed the very AssertionError that should have failed
    the test when no exception occurred. try/except/else separates the two.
    """
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                         classname="HiveServer",
                         command="restart",
                         config_file="hive-upgrade.json",
                         hdp_stack_version=self.UPGRADE_STACK_VERSION,
                         target=RMFTestCase.TARGET_COMMON_SERVICES,
                         call_mocks=[(0, "BAD VERSION")])
    except:
      # Expected: the script rejects the malformed version string.
      pass
    else:
      self.fail("Invalid hive version should have caused an exception")
    # Nothing should have been executed before the version check failed.
    self.assertNoMoreResources()
  @patch("resource_management.libraries.functions.security_commons.build_expectations")
  @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
  @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
  @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
  @patch("resource_management.libraries.script.Script.put_structured_out")
  def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
    """Exercise SECURITY_STATUS across the secured/unsecured/error paths.

    Fix: the call-count check used assertTrue(call_count, 2), where 2 was
    silently treated as the assertion *message* and nothing meaningful was
    asserted; it is now assertEqual(call_count, 2).
    """
    # --- happy path: correctly secured configuration -----------------------
    security_params = {
      'hive-site': {
        "hive.server2.authentication": "KERBEROS",
        "hive.metastore.sasl.enabled": "true",
        "hive.security.authorization.enabled": "true",
        "hive.server2.authentication.kerberos.keytab": "path/to/keytab",
        "hive.server2.authentication.kerberos.principal": "principal",
        "hive.server2.authentication.spnego.keytab": "path/to/spnego_keytab",
        "hive.server2.authentication.spnego.principal": "spnego_principal"
      }
    }
    result_issues = []
    props_value_check = {"hive.server2.authentication": "KERBEROS",
                         "hive.metastore.sasl.enabled": "true",
                         "hive.security.authorization.enabled": "true"}
    props_empty_check = ["hive.server2.authentication.kerberos.keytab",
                         "hive.server2.authentication.kerberos.principal",
                         "hive.server2.authentication.spnego.principal",
                         "hive.server2.authentication.spnego.keytab"]
    props_read_check = ["hive.server2.authentication.kerberos.keytab",
                        "hive.server2.authentication.spnego.keytab"]
    get_params_mock.return_value = security_params
    validate_security_config_mock.return_value = result_issues
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname="HiveServer",
                       command="security_status",
                       config_file="../../2.1/configs/secured.json",
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    get_params_mock.assert_called_with('/etc/hive/conf', {'hive-site.xml': "XML"})
    build_exp_mock.assert_called_with('hive-site', props_value_check, props_empty_check, props_read_check)
    put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
    # One kinit per principal (service + spnego); the last call uses spnego.
    self.assertEqual(cached_kinit_executor_mock.call_count, 2)
    cached_kinit_executor_mock.assert_called_with('/usr/bin/kinit',
                                                  self.config_dict['configurations']['hive-env']['hive_user'],
                                                  security_params['hive-site']['hive.server2.authentication.spnego.keytab'],
                                                  security_params['hive-site']['hive.server2.authentication.spnego.principal'],
                                                  self.config_dict['hostname'],
                                                  '/tmp')
    # --- kinit failure: the script must catch the executor's exception -----
    cached_kinit_executor_mock.reset_mock()
    cached_kinit_executor_mock.side_effect = Exception("Invalid command")
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                         classname="HiveServer",
                         command="security_status",
                         config_file="../../2.1/configs/secured.json",
                         hdp_stack_version=self.STACK_VERSION,
                         target=RMFTestCase.TARGET_COMMON_SERVICES)
    except:
      self.assertTrue(True)
    # --- security params missing the required section -----------------------
    empty_security_params = {}
    cached_kinit_executor_mock.reset_mock()
    get_params_mock.reset_mock()
    put_structured_out_mock.reset_mock()
    get_params_mock.return_value = empty_security_params
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname="HiveServer",
                       command="security_status",
                       config_file="../../2.1/configs/secured.json",
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})
    # --- validation reports issues ------------------------------------------
    result_issues_with_params = {}
    result_issues_with_params['hive-site'] = "Something bad happened"
    validate_security_config_mock.reset_mock()
    get_params_mock.reset_mock()
    validate_security_config_mock.return_value = result_issues_with_params
    get_params_mock.return_value = security_params
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname="HiveServer",
                       command="security_status",
                       config_file="../../2.1/configs/secured.json",
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
    # --- security disabled in the cluster config ----------------------------
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname="HiveServer",
                       command="security_status",
                       config_file="../../2.1/configs/default.json",
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
  def test_pre_rolling_restart(self):
    """PRE_ROLLING_RESTART switches hdp-select to the version in commandParams."""
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.2.1.0-3242'
    # Inject the target upgrade version into the command payload.
    json_content['commandParams']['version'] = version
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname="HiveServer",
                       command="pre_rolling_restart",
                       config_dict=json_content,
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute',
                              'hdp-select set hive-server2 %s' % version,)
    self.assertNoMoreResources()