test_hive_server.py
#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import os
import socket
import subprocess

from mock.mock import MagicMock, call, patch
from stacks.utils.RMFTestCase import *
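
# Unit tests for the HiveServer component of the HDP 2.0.6 HIVE stack scripts.
# Each test drives hive_server.py through RMFTestCase.executeScript() and then
# asserts the exact sequence of resources (Execute, File, Directory, XmlConfig,
# HdfsDirectory) the script is expected to create.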
class TestHiveServer(RMFTestCase):

  def test_configure_default(self):
    self.executeScript("2.0.6/services/HIVE/package/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "configure",
                       config_file="default.json"
    )
    self.assert_configure_default()
    self.assertNoMoreResources()
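
  # "start" should lay down the same configuration as "configure", launch
  # HiveServer2 through the generated start script, verify the metastore DB
  # connection, list the FS root via metatool, and wait for the Thrift port
  # by opening a socket.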
  @patch.object(subprocess, "Popen")
  @patch("socket.socket")
  def test_start_default(self, socket_mock, popen_mock):
    s = socket_mock.return_value
    self.executeScript("2.0.6/services/HIVE/package/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "start",
                       config_file="default.json"
    )

    self.assert_configure_default()
    self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                              not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
                              environment = {'HADOOP_HOME': '/usr'},
                              path = [os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
                              user = 'hive'
    )
    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
    )
    self.assertNoMoreResources()

    self.assertTrue(popen_mock.called)
    popen_mock.assert_called_with(
      ['su', '-s', '/bin/bash', '-', u'hive', '-c', "metatool -listFSRoot 2>/dev/null | grep hdfs:// | grep -v '.db$'"],
      shell=False, preexec_fn=None, stderr=-2, stdout=-1, env=None, cwd=None
    )
    self.assertTrue(socket_mock.called)
    self.assertTrue(s.close.called)
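
  # "stop" should kill the process recorded in the pid file and remove the pid
  # file; no socket is opened, so the port check must not run.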
  @patch("socket.socket")
  def test_stop_default(self, socket_mock):
    self.executeScript("2.0.6/services/HIVE/package/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "stop",
                       config_file="default.json"
    )

    self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive-server.pid',
                              not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)'
    )
    self.assertNoMoreResources()
    self.assertFalse(socket_mock.called)

  def test_configure_secured(self):
    self.executeScript("2.0.6/services/HIVE/package/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "configure",
                       config_file="secured.json"
    )
    self.assert_configure_secured()
    self.assertNoMoreResources()
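
  # Secured "start" mirrors the default flow with the Kerberized configuration;
  # hive_service.check_fs_root is patched out, so the test only verifies that
  # it was invoked.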
  @patch("hive_service.check_fs_root")
  @patch("socket.socket")
  def test_start_secured(self, socket_mock, check_fs_root_mock):
    s = socket_mock.return_value
    self.executeScript("2.0.6/services/HIVE/package/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "start",
                       config_file="secured.json"
    )

    self.assert_configure_secured()
    self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                              not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
                              environment = {'HADOOP_HOME': '/usr'},
                              path = [os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
                              user = 'hive'
    )
    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
    )
    self.assertNoMoreResources()

    self.assertTrue(check_fs_root_mock.called)
    self.assertTrue(socket_mock.called)
    self.assertTrue(s.close.called)

  @patch("socket.socket")
  def test_stop_secured(self, socket_mock):
    self.executeScript("2.0.6/services/HIVE/package/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "stop",
                       config_file="secured.json"
    )

    self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive-server.pid',
                              not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)'
    )
    self.assertNoMoreResources()
    self.assertFalse(socket_mock.called)
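
  # Expected resource sequence for a non-secure "configure": HDFS warehouse and
  # /user/hive directories, the conf.server and conf directories with their
  # config files, hive-site.xml and hive-env.sh for the server, the MySQL
  # connector copy, the DBConnectionVerification.jar download, the HiveServer2
  # start script, and the pid/log/lib directories.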
  def assert_configure_default(self):
    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
                              security_enabled = False,
                              keytab = UnknownConfigurationMock(),
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              kinit_path_local = '/usr/bin/kinit',
                              mode = 0777,
                              owner = 'hive',
                              bin_dir = '/usr/bin',
                              action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/user/hive',
                              security_enabled = False,
                              keytab = UnknownConfigurationMock(),
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              kinit_path_local = '/usr/bin/kinit',
                              mode = 0700,
                              owner = 'hive',
                              bin_dir = '/usr/bin',
                              action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
                              security_enabled = False,
                              keytab = UnknownConfigurationMock(),
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              kinit_path_local = '/usr/bin/kinit',
                              bin_dir = '/usr/bin',
                              action = ['create'],
    )
    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
                              owner = 'hive',
                              group = 'hadoop',
                              recursive = True,
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                              group = 'hadoop',
                              conf_dir = '/etc/hive/conf.server',
                              mode = 0644,
                              configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
                              owner = 'hive',
                              configurations = self.getConfig()['configurations']['mapred-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
                              content = 'log4jproperties\nline2',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
                              content = 'log4jproperties\nline2',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0644,
    )
    self.assertResourceCalled('Directory', '/etc/hive/conf',
                              owner = 'hive',
                              group = 'hadoop',
                              recursive = True,
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                              group = 'hadoop',
                              conf_dir = '/etc/hive/conf',
                              mode = 0644,
                              configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
                              owner = 'hive',
                              configurations = self.getConfig()['configurations']['mapred-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
                              content = 'log4jproperties\nline2',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
                              content = 'log4jproperties\nline2',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0644,
    )
    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                              group = 'hadoop',
                              conf_dir = '/etc/hive/conf.server',
                              mode = 0644,
                              configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
                              owner = 'hive',
                              configurations = self.getConfig()['configurations']['hive-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
                              content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
                              creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
                              path = ['/bin', '/usr/bin/'],
                              environment = {'PATH': os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
                              not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
    )
    self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
                              environment = {'no_proxy': 'c6401.ambari.apache.org'},
                              not_if = '[ -f DBConnectionVerification.jar]',
    )
    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
                              content = Template('startHiveserver2.sh.j2'),
                              mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hive',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hive',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/lib/hive',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
    )
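
  # Same resource sequence as the default case, but with security_enabled = True
  # and the hdfs headless keytab supplied to the HdfsDirectory resources.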
  def assert_configure_secured(self):
    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
                              security_enabled = True,
                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              kinit_path_local = '/usr/bin/kinit',
                              bin_dir = '/usr/bin',
                              mode = 0777,
                              owner = 'hive',
                              action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/user/hive',
                              security_enabled = True,
                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              kinit_path_local = '/usr/bin/kinit',
                              mode = 0700,
                              bin_dir = '/usr/bin',
                              owner = 'hive',
                              action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
                              security_enabled = True,
                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
                              conf_dir = '/etc/hadoop/conf',
                              hdfs_user = 'hdfs',
                              bin_dir = '/usr/bin',
                              kinit_path_local = '/usr/bin/kinit',
                              action = ['create'],
    )
    self.assertResourceCalled('Directory', '/etc/hive/conf.server',
                              owner = 'hive',
                              group = 'hadoop',
                              recursive = True,
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                              group = 'hadoop',
                              conf_dir = '/etc/hive/conf.server',
                              mode = 0644,
                              configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
                              owner = 'hive',
                              configurations = self.getConfig()['configurations']['mapred-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
                              content = 'log4jproperties\nline2',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
                              content = 'log4jproperties\nline2',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0644,
    )
    self.assertResourceCalled('Directory', '/etc/hive/conf',
                              owner = 'hive',
                              group = 'hadoop',
                              recursive = True,
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                              group = 'hadoop',
                              conf_dir = '/etc/hive/conf',
                              mode = 0644,
                              configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
                              owner = 'hive',
                              configurations = self.getConfig()['configurations']['mapred-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
                              content = 'log4jproperties\nline2',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
                              content = 'log4jproperties\nline2',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0644,
    )
    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                              group = 'hadoop',
                              conf_dir = '/etc/hive/conf.server',
                              mode = 0644,
                              configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
                              owner = 'hive',
                              configurations = self.getConfig()['configurations']['hive-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
                              content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                              owner = 'hive',
                              group = 'hadoop',
    )
    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
                              creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
                              path = ['/bin', '/usr/bin/'],
                              environment = {'PATH': os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
                              not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
    )
    self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
                              environment = {'no_proxy': 'c6401.ambari.apache.org'},
                              not_if = '[ -f DBConnectionVerification.jar]',
    )
    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
                              content = Template('startHiveserver2.sh.j2'),
                              mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hive',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hive',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/lib/hive',
                              owner = 'hive',
                              group = 'hadoop',
                              mode = 0755,
                              recursive = True,
    )
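
  # If the socket never connects, the start command is expected to fail; the
  # patched time.time() values presumably exhaust the port-wait timeout quickly.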
  @patch("hive_service.check_fs_root")
  @patch("time.time")
  @patch("socket.socket")
  def test_socket_timeout(self, socket_mock, time_mock, check_fs_root_mock):
    s = socket_mock.return_value
    s.connect = MagicMock()
    s.connect.side_effect = socket.error("")
    time_mock.side_effect = [0, 1000, 2000, 3000, 4000]

    try:
      self.executeScript("2.0.6/services/HIVE/package/scripts/hive_server.py",
                         classname = "HiveServer",
                         command = "start",
                         config_file="default.json"
      )
      self.fail("Script failure due to socket error was expected")
    except:
      self.assert_configure_default()
      self.assertFalse(socket_mock.called)
      self.assertFalse(s.close.called)