test_hive_server.py 44 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870
  1. #!/usr/bin/env python
  2. '''
  3. Licensed to the Apache Software Foundation (ASF) under one
  4. or more contributor license agreements. See the NOTICE file
  5. distributed with this work for additional information
  6. regarding copyright ownership. The ASF licenses this file
  7. to you under the Apache License, Version 2.0 (the
  8. "License"); you may not use this file except in compliance
  9. with the License. You may obtain a copy of the License at
  10. http://www.apache.org/licenses/LICENSE-2.0
  11. Unless required by applicable law or agreed to in writing, software
  12. distributed under the License is distributed on an "AS IS" BASIS,
  13. WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. See the License for the specific language governing permissions and
  15. limitations under the License.
  16. '''
  17. import json
  18. import socket
  19. import subprocess
  20. from mock.mock import MagicMock, patch
  21. from resource_management.libraries.functions import version
  22. from resource_management.core import shell
  23. from resource_management.libraries.script.script import Script
  24. from stacks.utils.RMFTestCase import *
  25. from resource_management.libraries import functions
@patch.object(functions, "get_hdp_version", new = MagicMock(return_value="2.0.0.0-1234"))
@patch("resource_management.libraries.functions.check_thrift_port_sasl", new=MagicMock())
class TestHiveServer(RMFTestCase):
  """RMF stack tests for the HiveServer2 lifecycle commands
  (configure / start / stop / restart) of the HIVE common-services scripts.
  """
  # Location of the Hive service package under common-services.
  COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
  # Stack version used by the non-upgrade scenarios.
  STACK_VERSION = "2.0.6"
  # Stack version used by the rolling-upgrade scenarios.
  UPGRADE_STACK_VERSION = "2.2"
  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
  def test_configure_default(self):
    """CONFIGURE on an unsecured cluster produces exactly the default resource set."""
    script_path = self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py"
    self.executeScript(script_path,
                       classname="HiveServer",
                       command="configure",
                       config_file="default.json",
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    # Everything expected for the default configuration, and nothing else.
    self.assert_configure_default()
    self.assertNoMoreResources()
  @patch("socket.socket")
  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
  def test_start_default(self, socket_mock):
    """START on an unsecured cluster: configure, fix the metastore FS root,
    launch HiveServer2 and verify JDBC connectivity to the metastore DB.

    socket_mock keeps the port-availability probe from opening a real socket.
    (The previously captured ``socket_mock.return_value`` local was unused
    and has been removed.)
    """
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname="HiveServer",
                       command="start",
                       config_file="default.json",
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    # The metastore filesystem root is corrected before the server starts.
    self.assertResourceCalled('Execute',
                              'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
                              environment={'PATH': '/bin:/usr/lib/hive/bin:/usr/bin'},
                              user='hive'
    )
    # HiveServer2 is started only when no live process owns the pid file.
    self.assertResourceCalled('Execute',
                              '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                              environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
                              not_if='ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
                              user='hive',
                              path=['/bin:/usr/lib/hive/bin:/usr/bin']
    )
    # Metastore DB connectivity is verified with retries.
    self.assertResourceCalled('Execute',
                              '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/lib/hive/lib//mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
                              tries=5,
                              try_sleep=10
    )
    self.assertNoMoreResources()
  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
  def test_start_default_no_copy(self):
    # START with "default_no_install.json": the start flow must produce the
    # same resources as test_start_default even when the install/copy phase
    # is skipped.
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "start",
                       config_file="default_no_install.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    # Metastore filesystem root is corrected before the server starts.
    self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
                              environment = {'PATH': '/bin:/usr/lib/hive/bin:/usr/bin'},
                              user = 'hive',
    )
    # HiveServer2 is started only when no live process owns the pid file.
    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                              not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
                              environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
                              path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                              user = 'hive'
    )
    # Metastore DB connectivity is verified with retries.
    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/lib/hive/lib//mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
    )
    self.assertNoMoreResources()
  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
  def test_start_default_alt_tmp(self):
    # START with the NN-HA config "default_hive_nn_ha.json"; no_tmp=True
    # because the custom scratch dir is not expected in this scenario.
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "start",
                       config_file="default_hive_nn_ha.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default(no_tmp=True)
    # Metastore filesystem root is corrected before the server starts.
    self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
                              environment = {'PATH': '/bin:/usr/lib/hive/bin:/usr/bin'},
                              user = 'hive',
    )
    # HiveServer2 is started only when no live process owns the pid file.
    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                              not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
                              environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
                              path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                              user = 'hive'
    )
    # Metastore DB connectivity is verified with retries.
    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/lib/hive/lib//mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
    )
    self.assertNoMoreResources()
  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
  def test_start_default_alt_nn_ha_tmp(self):
    # START with the second NN-HA config "default_hive_nn_ha_2.json";
    # identical expectations to test_start_default_alt_tmp.
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "start",
                       config_file="default_hive_nn_ha_2.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default(no_tmp=True)
    # Metastore filesystem root is corrected before the server starts.
    self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
                              environment = {'PATH': '/bin:/usr/lib/hive/bin:/usr/bin'},
                              user = 'hive',
    )
    # HiveServer2 is started only when no live process owns the pid file.
    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                              not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
                              environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
                              path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                              user = 'hive'
    )
    # Metastore DB connectivity is verified with retries.
    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/lib/hive/lib//mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
    )
    self.assertNoMoreResources()
  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
  def test_stop_default(self):
    """STOP on an unsecured cluster: kill, escalate to kill -9, wait for the
    process to die, then remove the pid file."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "stop",
                       config_file="default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    pid_file = '/var/run/hive/hive-server.pid'
    # Shell fragment: true when no live process owns the pid file.
    process_dead = '! (ls %s >/dev/null 2>&1 && ps -p `cat %s` >/dev/null 2>&1)' % (pid_file, pid_file)
    self.assertResourceCalled('Execute', 'ambari-sudo.sh kill `cat %s`' % pid_file,
                              not_if = process_dead,
    )
    self.assertResourceCalled('Execute', 'ambari-sudo.sh kill -9 `cat %s`' % pid_file,
                              not_if = '%s || ( sleep 5 && %s )' % (process_dead, process_dead),
    )
    self.assertResourceCalled('Execute', process_dead,
                              tries = 20,
                              try_sleep = 3,
    )
    self.assertResourceCalled('File', pid_file,
                              action = ['delete'],
    )
    self.assertNoMoreResources()
  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
  def test_configure_secured(self):
    """CONFIGURE on a Kerberized cluster produces exactly the secured resource set."""
    script_path = self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py"
    self.executeScript(script_path,
                       classname="HiveServer",
                       command="configure",
                       config_file="secured.json",
                       hdp_stack_version=self.STACK_VERSION,
                       target=RMFTestCase.TARGET_COMMON_SERVICES)
    # Everything expected for the secured configuration, and nothing else.
    self.assert_configure_secured()
    self.assertNoMoreResources()
  @patch("hive_service.check_fs_root")
  @patch("socket.socket")
  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
  def test_start_secured(self, socket_mock, check_fs_root_mock):
    """START on a Kerberized cluster: configure, kinit as the hive service
    principal, launch HiveServer2 and verify metastore DB connectivity.

    check_fs_root is mocked out; the test only asserts it was invoked.
    (The previously captured ``socket_mock.return_value`` local was unused
    and has been removed.)
    """
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "start",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    # Service principal is authenticated before the server is launched.
    self.assertResourceCalled('Execute',
                              '/usr/bin/kinit -kt /etc/security/keytabs/hive.service.keytab hive/c6401.ambari.apache.org@EXAMPLE.COM; ',
                              user='hive',
    )
    # HiveServer2 is started only when no live process owns the pid file.
    self.assertResourceCalled('Execute',
                              '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                              environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
                              not_if='ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
                              user='hive',
                              path=['/bin:/usr/lib/hive/bin:/usr/bin'],
    )
    # Metastore DB connectivity is verified with retries.
    self.assertResourceCalled('Execute',
                              '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/lib/hive/lib//mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
                              tries=5,
                              try_sleep=10,
    )
    self.assertNoMoreResources()
    self.assertTrue(check_fs_root_mock.called)
  @patch("socket.socket")
  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
  def test_stop_secured(self, socket_mock):
    # STOP on a Kerberized cluster: identical kill / kill -9 / wait / delete
    # sequence as the unsecured case (stopping does not require kinit).
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "stop",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    # Graceful kill, skipped when the process is already gone.
    self.assertResourceCalled('Execute', 'ambari-sudo.sh kill `cat /var/run/hive/hive-server.pid`',
                              not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
    )
    # Forced kill, skipped when the process died (possibly after a 5s grace).
    self.assertResourceCalled('Execute', 'ambari-sudo.sh kill -9 `cat /var/run/hive/hive-server.pid`',
                              not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1) || ( sleep 5 && ! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1) )',
    )
    # Poll until the process is confirmed dead.
    self.assertResourceCalled('Execute', '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)',
                              tries = 20,
                              try_sleep = 3,
    )
    # Stale pid file is removed.
    self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
                              action = ['delete'],
    )
    self.assertNoMoreResources()
  def assert_configure_default(self, no_tmp = False):
    """Shared expectations for CONFIGURE on an unsecured cluster.

    no_tmp: when True, the /custompath/tmp/hive scratch directory is not
    expected (used by the NN-HA scenarios).
    """
    # Verify creating of Hcat and Hive directories
    self.assertResourceCalled('HdfsResource', '/apps/webhcat',
        security_enabled = False,
        hadoop_bin_dir = '/usr/bin',
        keytab = UnknownConfigurationMock(),
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        owner = 'hcat',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
        mode = 0755,
    )
    self.assertResourceCalled('HdfsResource', '/user/hcat',
        security_enabled = False,
        hadoop_bin_dir = '/usr/bin',
        keytab = UnknownConfigurationMock(),
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        owner = 'hcat',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
        mode = 0755,
    )
    # Warehouse is world-writable; the hive user dir is private.
    self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
        security_enabled = False,
        hadoop_bin_dir = '/usr/bin',
        keytab = UnknownConfigurationMock(),
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        owner = 'hive',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/hive',
        security_enabled = False,
        hadoop_bin_dir = '/usr/bin',
        keytab = UnknownConfigurationMock(),
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        owner = 'hive',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
        mode = 0700,
    )
    # Custom scratch dir only appears in the non-NN-HA configs.
    if not no_tmp:
      self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
          security_enabled = False,
          hadoop_conf_dir = '/etc/hadoop/conf',
          keytab = UnknownConfigurationMock(),
          kinit_path_local = '/usr/bin/kinit',
          user = 'hdfs',
          owner = 'hive',
          group = 'hdfs',
          hadoop_bin_dir = '/usr/bin',
          type = 'directory',
          action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
          mode = 0777,
      )
    # Pending HDFS operations are flushed.
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = False,
        hadoop_bin_dir = '/usr/bin',
        keytab = UnknownConfigurationMock(),
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    # Local configuration directories and files.
    self.assertResourceCalled('Directory', '/etc/hive',
                              mode=0755,
    )
    self.assertResourceCalled('Directory', '/etc/hive/conf',
                              owner='hive',
                              group='hadoop',
                              recursive=True,
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                              group='hadoop',
                              conf_dir='/etc/hive/conf',
                              mode=0644,
                              configuration_attributes={u'final': {u'mapred.healthChecker.script.path': u'true',
                                                                   u'mapreduce.jobtracker.staging.root.dir': u'true'}},
                              owner='hive',
                              configurations=self.getConfig()['configurations']['mapred-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
                              owner='hive',
                              group='hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
                              owner='hive',
                              group='hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
                              content='log4jproperties\nline2',
                              owner='hive',
                              group='hadoop',
                              mode=0644,
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
                              content='log4jproperties\nline2',
                              owner='hive',
                              group='hadoop',
                              mode=0644,
    )
    # Server-side hive-site.xml goes to conf.server.
    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                              group='hadoop',
                              conf_dir='/etc/hive/conf.server',
                              mode=0644,
                              configuration_attributes={u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                   u'javax.jdo.option.ConnectionDriverName': u'true',
                                                                   u'javax.jdo.option.ConnectionPassword': u'true'}},
                              owner='hive',
                              configurations=self.getConfig()['configurations']['hive-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
                              content=InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                              owner='hive',
                              group='hadoop',
    )
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
                              owner='root',
                              group='root',
                              recursive=True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hive.conf',
                              content=Template('hive.conf.j2'),
                              owner='root',
                              group='root',
                              mode=0644,
    )
    # JDBC driver is copied into the Hive lib dir (with sudo).
    self.assertResourceCalled('Execute', ('cp',
                                          '--remove-destination',
                                          '/usr/share/java/mysql-connector-java.jar',
                                          '/usr/lib/hive/lib//mysql-connector-java.jar'),
                              path=['/bin', '/usr/bin/'],
                              sudo=True,
    )
    self.assertResourceCalled('File', '/usr/lib/hive/lib//mysql-connector-java.jar',
                              mode=0644,
    )
    self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
                              content=DownloadSource('http://c6401.ambari.apache.org:8080/resources'
                                                     '/DBConnectionVerification.jar'),
    )
    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
                              content=Template('startHiveserver2.sh.j2'),
                              mode=0755,
    )
    # Runtime directories for the service.
    self.assertResourceCalled('Directory', '/var/run/hive',
                              owner='hive',
                              mode=0755,
                              group='hadoop',
                              recursive=True,
                              cd_access='a',
    )
    self.assertResourceCalled('Directory', '/var/log/hive',
                              owner='hive',
                              mode=0755,
                              group='hadoop',
                              recursive=True,
                              cd_access='a',
    )
    self.assertResourceCalled('Directory', '/var/lib/hive',
                              owner='hive',
                              mode=0755,
                              group='hadoop',
                              recursive=True,
                              cd_access='a',
    )
  def assert_configure_secured(self):
    """Shared expectations for CONFIGURE on a Kerberized cluster.

    Mirrors assert_configure_default but with security_enabled=True, the
    hdfs headless keytab/principal, and the scratch dir always expected.
    """
    self.assertResourceCalled('HdfsResource', '/apps/webhcat',
        security_enabled = True,
        hadoop_bin_dir = '/usr/bin',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        owner = 'hcat',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
        mode = 0755,
    )
    self.assertResourceCalled('HdfsResource', '/user/hcat',
        security_enabled = True,
        hadoop_bin_dir = '/usr/bin',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        owner = 'hcat',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
        mode = 0755,
    )
    self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
        security_enabled = True,
        hadoop_bin_dir = '/usr/bin',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        owner = 'hive',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/hive',
        security_enabled = True,
        hadoop_bin_dir = '/usr/bin',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        owner = 'hive',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
        mode = 0700,
    )
    # Scratch dir is always expected in the secured scenario.
    self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
        security_enabled = True,
        hadoop_conf_dir = '/etc/hadoop/conf',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        owner = 'hive',
        group = 'hdfs',
        hadoop_bin_dir = '/usr/bin',
        type = 'directory',
        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
        mode = 0777,
    )
    # Pending HDFS operations are flushed.
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = True,
        hadoop_bin_dir = '/usr/bin',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    # Local configuration directories and files.
    self.assertResourceCalled('Directory', '/etc/hive',
                              mode=0755,
    )
    self.assertResourceCalled('Directory', '/etc/hive/conf',
                              owner='hive',
                              group='hadoop',
                              recursive=True,
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
                              group='hadoop',
                              conf_dir='/etc/hive/conf',
                              mode=0644,
                              configuration_attributes={u'final': {u'mapred.healthChecker.script.path': u'true',
                                                                   u'mapreduce.jobtracker.staging.root.dir': u'true'}},
                              owner='hive',
                              configurations=self.getConfig()['configurations']['mapred-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
                              owner='hive',
                              group='hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
                              owner='hive',
                              group='hadoop',
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
                              content='log4jproperties\nline2',
                              owner='hive',
                              group='hadoop',
                              mode=0644,
    )
    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
                              content='log4jproperties\nline2',
                              owner='hive',
                              group='hadoop',
                              mode=0644,
    )
    # Server-side hive-site.xml goes to conf.server.
    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                              group='hadoop',
                              conf_dir='/etc/hive/conf.server',
                              mode=0644,
                              configuration_attributes={u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                   u'javax.jdo.option.ConnectionDriverName': u'true',
                                                                   u'javax.jdo.option.ConnectionPassword': u'true'}},
                              owner='hive',
                              configurations=self.getConfig()['configurations']['hive-site'],
    )
    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
                              content=InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                              owner='hive',
                              group='hadoop',
    )
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
                              owner='root',
                              group='root',
                              recursive=True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hive.conf',
                              content=Template('hive.conf.j2'),
                              owner='root',
                              group='root',
                              mode=0644,
    )
    # JDBC driver is copied into the Hive lib dir (with sudo).
    self.assertResourceCalled('Execute', ('cp',
                                          '--remove-destination',
                                          '/usr/share/java/mysql-connector-java.jar',
                                          '/usr/lib/hive/lib//mysql-connector-java.jar'),
                              path=['/bin', '/usr/bin/'],
                              sudo=True,
    )
    self.assertResourceCalled('File', '/usr/lib/hive/lib//mysql-connector-java.jar',
                              mode=0644,
    )
    self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
                              content=DownloadSource(
                                  'http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
    )
    self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
                              content=Template('startHiveserver2.sh.j2'),
                              mode=0755,
    )
    # Runtime directories for the service.
    self.assertResourceCalled('Directory', '/var/run/hive',
                              owner='hive',
                              group='hadoop',
                              mode=0755,
                              recursive=True,
                              cd_access='a',
    )
    self.assertResourceCalled('Directory', '/var/log/hive',
                              owner='hive',
                              group='hadoop',
                              mode=0755,
                              recursive=True,
                              cd_access='a',
    )
    self.assertResourceCalled('Directory', '/var/lib/hive',
                              owner='hive',
                              group='hadoop',
                              mode=0755,
                              recursive=True,
                              cd_access='a',
    )
  @patch("hive_service.check_fs_root")
  @patch("time.time")
  @patch("socket.socket")
  def test_socket_timeout(self, socket_mock, time_mock, check_fs_root_mock):
    """START must fail when the HiveServer2 port never becomes reachable.

    The socket's connect() always raises socket.error and time.time() is
    advanced far enough for the wait loop to give up.
    """
    s = socket_mock.return_value
    s.connect = MagicMock()
    s.connect.side_effect = socket.error("")
    time_mock.side_effect = [0, 1000, 2000, 3000, 4000]
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                         classname = "HiveServer",
                         command = "start",
                         config_file="default.json",
                         hdp_stack_version = self.STACK_VERSION,
                         target = RMFTestCase.TARGET_COMMON_SERVICES
      )
    except:
      # The start failed as expected; configuration must still have happened.
      self.assert_configure_default()
    else:
      # BUG FIX: self.fail() used to sit inside the try block, so the
      # AssertionError it raises was swallowed by the bare except and the
      # test could never actually fail.  The else clause runs only when
      # executeScript did NOT raise.
      self.fail("Script failure due to socket error was expected")
  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
  def test_stop_during_upgrade(self):
    # RESTART during a rolling upgrade: the current HiveServer2 version is
    # deregistered and hdp-select is pointed at the target version.
    # call_mocks supply the mocked shell output ("hive-server2 - <version>")
    # that the upgrade code parses; two entries because it is invoked twice.
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer", command = "restart", config_file = "hive-upgrade.json",
                       hdp_stack_version = self.UPGRADE_STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES,
                       call_mocks = [(0,"hive-server2 - 2.2.0.0-2041"), (0,"hive-server2 - 2.2.0.0-2041")]
    )
    self.assertResourceCalled('Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service hiveserver2 --deregister 2.2.0.0-2041',
                              path=['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
                              tries=1, user='hive')
    self.assertResourceCalled('Execute', 'hdp-select set hive-server2 2.2.1.0-2065',)
  def test_stop_during_upgrade_bad_hive_version(self):
    """A restart during upgrade must abort when the "hive --version" output
    cannot be parsed into a version number, running no further resources."""
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
        classname = "HiveServer", command = "restart", config_file = "hive-upgrade.json",
        hdp_stack_version = self.UPGRADE_STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0,"BAD VERSION")])
    except Exception:
      # Expected: the unparseable version output aborts the restart.
      pass
    else:
      # BUG FIX: self.fail() used to sit inside the try-block guarded by a
      # bare "except: pass", which swallowed the AssertionError and made the
      # test pass unconditionally. The else clause restores the intended
      # check: reaching here without an exception is a failure.
      self.fail("Invalid hive version should have caused an exception")
    self.assertNoMoreResources()
  @patch("resource_management.libraries.functions.security_commons.build_expectations")
  @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
  @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
  @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
  @patch("resource_management.libraries.script.Script.put_structured_out")
  def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
    """Exercise the "security_status" command across five scenarios:

    1. Correct Kerberos parameters       -> reports "SECURED_KERBEROS".
    2. cached_kinit_executor raises      -> exception is handled by the script.
    3. Empty security params on disk     -> reports "securityIssuesFound".
    4. validate_security_config issues   -> reports "UNSECURED".
    5. Non-secure cluster config         -> reports "UNSECURED".
    """
    # Scenario 1: the parameters read from the filesystem describe a fully
    # kerberized HiveServer2.
    security_params = {
      'hive-site': {
        "hive.server2.authentication": "KERBEROS",
        "hive.metastore.sasl.enabled": "true",
        "hive.security.authorization.enabled": "true",
        "hive.server2.authentication.kerberos.keytab": "path/to/keytab",
        "hive.server2.authentication.kerberos.principal": "principal",
        "hive.server2.authentication.spnego.keytab": "path/to/spnego_keytab",
        "hive.server2.authentication.spnego.principal": "spnego_principal"
      }
    }
    # No validation issues for the happy path.
    result_issues = []
    # Expected shape of the build_expectations() call made by the script:
    # exact-value properties, must-be-nonempty properties, must-be-readable files.
    props_value_check = {"hive.server2.authentication": "KERBEROS",
                         "hive.metastore.sasl.enabled": "true",
                         "hive.security.authorization.enabled": "true"}
    props_empty_check = ["hive.server2.authentication.kerberos.keytab",
                         "hive.server2.authentication.kerberos.principal",
                         "hive.server2.authentication.spnego.principal",
                         "hive.server2.authentication.spnego.keytab"]
    props_read_check = ["hive.server2.authentication.kerberos.keytab",
                        "hive.server2.authentication.spnego.keytab"]
    get_params_mock.return_value = security_params
    validate_security_config_mock.return_value = result_issues
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "security_status",
                       config_file="../../2.1/configs/secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    get_params_mock.assert_called_with('/etc/hive/conf', {'hive-site.xml': "XML"})
    build_exp_mock.assert_called_with('hive-site', props_value_check, props_empty_check, props_read_check)
    put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
    # NOTE(review): assertTrue(x, 2) treats 2 as the failure *message*, so this
    # only checks call_count is truthy — assertEqual(2, call_count) was likely
    # intended. Left as-is to avoid changing test strictness; confirm and fix.
    self.assertTrue(cached_kinit_executor_mock.call_count, 2)
    # The most recent kinit must have used the SPNEGO keytab/principal.
    cached_kinit_executor_mock.assert_called_with('/usr/bin/kinit',
                                                  self.config_dict['configurations']['hive-env']['hive_user'],
                                                  security_params['hive-site']['hive.server2.authentication.spnego.keytab'],
                                                  security_params['hive-site']['hive.server2.authentication.spnego.principal'],
                                                  self.config_dict['hostname'],
                                                  '/tmp')
    # Scenario 2: an exception thrown by cached_kinit_executor is caught by
    # the script (executeScript should not propagate it).
    cached_kinit_executor_mock.reset_mock()
    cached_kinit_executor_mock.side_effect = Exception("Invalid command")
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                         classname = "HiveServer",
                         command = "security_status",
                         config_file="../../2.1/configs/secured.json",
                         hdp_stack_version = self.STACK_VERSION,
                         target = RMFTestCase.TARGET_COMMON_SERVICES
      )
    except:
      # NOTE(review): bare except + assertTrue(True) asserts nothing; the
      # scenario's outcome is effectively unchecked. Confirm intended behavior.
      self.assertTrue(True)
    # Scenario 3: filesystem params are missing entirely -> the script must
    # report the keytab/principal problem via structured output.
    empty_security_params = {}
    cached_kinit_executor_mock.reset_mock()
    get_params_mock.reset_mock()
    put_structured_out_mock.reset_mock()
    get_params_mock.return_value = empty_security_params
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "security_status",
                       config_file="../../2.1/configs/secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})
    # Scenario 4: validation reports issues -> state downgraded to UNSECURED.
    result_issues_with_params = {}
    result_issues_with_params['hive-site']="Something bad happened"
    validate_security_config_mock.reset_mock()
    get_params_mock.reset_mock()
    validate_security_config_mock.return_value = result_issues_with_params
    get_params_mock.return_value = security_params
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "security_status",
                       config_file="../../2.1/configs/secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
    # Scenario 5: cluster not kerberized (default.json) -> UNSECURED.
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "security_status",
                       config_file="../../2.1/configs/default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
  @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
  def test_pre_rolling_restart(self, copy_to_hdfs_mock):
    """pre_rolling_restart must switch hdp-select to the new version, push the
    mapreduce and tez tarballs to HDFS, and execute the pending HdfsResource."""
    copy_to_hdfs_mock.return_value = True
    new_version = '2.2.1.0-3242'
    # Load the stock 2.0.6 config and stamp the target upgrade version into it.
    config_path = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_path, "r") as config_stream:
      command_json = json.load(config_stream)
    command_json['commandParams']['version'] = new_version

    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "pre_rolling_restart",
                       config_dict = command_json,
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES)

    self.assertResourceCalled('Execute', 'hdp-select set hive-server2 ' + new_version,)
    # Exactly the two expected tarballs were copied, as hadoop owner / hdfs user.
    for tarball in ("mapreduce", "tez"):
      copy_to_hdfs_mock.assert_any_call(tarball, "hadoop", "hdfs")
    self.assertEquals(2, copy_to_hdfs_mock.call_count)
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = False,
        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
        keytab = UnknownConfigurationMock(),
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        action = ['execute'],
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        principal_name = 'missing_principal',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
    )
    self.assertNoMoreResources()
  @patch("resource_management.core.shell.call")
  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
  @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
  def test_pre_rolling_restart_23(self, copy_to_hdfs_mock, call_mock):
    """Like test_pre_rolling_restart but targeting an HDP 2.3 version, which
    must additionally run the conf-select create/set pair for hive."""
    new_version = '2.3.0.0-1234'
    # Load the stock 2.0.6 config and stamp the target upgrade version into it.
    config_path = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_path, "r") as config_stream:
      command_json = json.load(config_stream)
    command_json['commandParams']['version'] = new_version
    copy_to_hdfs_mock.return_value = True

    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                       classname = "HiveServer",
                       command = "pre_rolling_restart",
                       config_dict = command_json,
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES,
                       call_mocks = [(0, None), (0, None)],
                       mocks_dict = mocks_dict)

    self.assertResourceCalled('Execute', 'hdp-select set hive-server2 ' + new_version,)
    # Exactly the two expected tarballs were copied, as hadoop owner / hdfs user.
    for tarball in ("mapreduce", "tez"):
      copy_to_hdfs_mock.assert_any_call(tarball, "hadoop", "hdfs")
    self.assertEquals(2, copy_to_hdfs_mock.call_count)
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = False,
        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
        keytab = UnknownConfigurationMock(),
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        action = ['execute'],
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        principal_name = 'missing_principal',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
    )
    self.assertNoMoreResources()

    # shell.call ran exactly the two conf-select commands, in this order.
    self.assertEquals(2, mocks_dict['call'].call_count)
    self.assertEquals(
      "conf-select create-conf-dir --package hive --stack-version 2.3.0.0-1234 --conf-version 0",
      mocks_dict['call'].call_args_list[0][0][0])
    self.assertEquals(
      "conf-select set-conf-dir --package hive --stack-version 2.3.0.0-1234 --conf-version 0",
      mocks_dict['call'].call_args_list[1][0][0])