test_hbase_master.py

#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import json
from mock.mock import MagicMock, patch
from stacks.utils.RMFTestCase import *


@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
@patch("os.path.exists", new = MagicMock(return_value=True))
class TestHBaseMaster(RMFTestCase):
  COMMON_SERVICES_PACKAGE_DIR = "HBASE/0.96.0.2.0/package"
  STACK_VERSION = "2.0.6"
  TMP_PATH = "/hadoop"
  DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
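
  # Install tests: verify which package resources are requested for a cluster
  # without Phoenix and for one with Phoenix enabled.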
  def test_install_hbase_master_default_no_phx(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "install",
        config_file="hbase_no_phx.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        try_install=True
    )
    self.assertResourceCalled('Package', 'hbase_2_3_*',
        retry_count=5,
        retry_on_repo_unavailability=False)
    self.assertNoMoreResources()

  def test_install_hbase_master_default_with_phx(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "install",
        config_file="hbase_with_phx.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        try_install=True
    )
    self.assertResourceCalled('Package', 'hbase_2_3_*',
        retry_count=5,
        retry_on_repo_unavailability=False)
    self.assertResourceCalled('Package', 'phoenix_2_3_*',
        retry_count=5,
        retry_on_repo_unavailability=False)
    self.assertNoMoreResources()

  def test_configure_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "configure",
        config_file="default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertNoMoreResources()

  def test_start_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "start",
        config_file="default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf start master',
        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hbase/hbase-hbase-master.pid && ps -p `ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
        user = 'hbase'
    )
    self.assertNoMoreResources()

  def test_stop_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "stop",
        config_file="default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop master',
        only_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hbase/hbase-hbase-master.pid && ps -p `ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
        on_timeout = '! ( ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hbase/hbase-hbase-master.pid && ps -p `ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cat /var/run/hbase/hbase-hbase-master.pid`',
        timeout = 30,
        user = 'hbase',
    )
    self.assertResourceCalled('File', '/var/run/hbase/hbase-hbase-master.pid',
        action = ['delete'],
    )
    self.assertNoMoreResources()

  def test_decom_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "decommission",
        config_file="default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/usr/lib/hbase/bin/draining_servers.rb',
        content = StaticFile('draining_servers.rb'),
        mode = 0755,
    )
    self.assertResourceCalled('Execute', ' /usr/lib/hbase/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/lib/hbase/bin/draining_servers.rb add host1',
        logoutput = True,
        user = 'hbase',
    )
    self.assertResourceCalled('Execute', ' /usr/lib/hbase/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/lib/hbase/bin/region_mover.rb unload host1',
        logoutput = True,
        user = 'hbase',
    )
    self.assertResourceCalled('Execute', ' /usr/lib/hbase/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/lib/hbase/bin/draining_servers.rb add host2',
        logoutput = True,
        user = 'hbase',
    )
    self.assertResourceCalled('Execute', ' /usr/lib/hbase/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/lib/hbase/bin/region_mover.rb unload host2',
        logoutput = True,
        user = 'hbase',
    )
    self.assertNoMoreResources()

  def test_decom_default_draining_only(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "decommission",
        config_file="default.hbasedecom.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/usr/lib/hbase/bin/draining_servers.rb',
        content = StaticFile('draining_servers.rb'),
        mode = 0755,
    )
    self.assertResourceCalled('Execute', ' /usr/lib/hbase/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/lib/hbase/bin/draining_servers.rb remove host1',
        logoutput = True,
        user = 'hbase',
    )
    self.assertNoMoreResources()

  def test_configure_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "configure",
        config_file="secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertNoMoreResources()

  def test_start_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "start",
        config_file="secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf start master',
        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hbase/hbase-hbase-master.pid && ps -p `ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
        user = 'hbase',
    )
    self.assertNoMoreResources()

  def test_stop_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "stop",
        config_file="secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop master',
        only_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hbase/hbase-hbase-master.pid && ps -p `ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
        on_timeout = '! ( ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hbase/hbase-hbase-master.pid && ps -p `ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cat /var/run/hbase/hbase-hbase-master.pid`',
        timeout = 30,
        user = 'hbase',
    )
    self.assertResourceCalled('File', '/var/run/hbase/hbase-hbase-master.pid',
        action = ['delete'],
    )
    self.assertNoMoreResources()

  def test_decom_secure(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "decommission",
        config_file="secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/usr/lib/hbase/bin/draining_servers.rb',
        content = StaticFile('draining_servers.rb'),
        mode = 0755,
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hbase.service.keytab hbase/c6401.ambari.apache.org@EXAMPLE.COM; /usr/lib/hbase/bin/hbase --config /etc/hbase/conf -Djava.security.auth.login.config=/etc/hbase/conf/hbase_master_jaas.conf org.jruby.Main /usr/lib/hbase/bin/draining_servers.rb add host1',
        logoutput = True,
        user = 'hbase',
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hbase.service.keytab hbase/c6401.ambari.apache.org@EXAMPLE.COM; /usr/lib/hbase/bin/hbase --config /etc/hbase/conf -Djava.security.auth.login.config=/etc/hbase/conf/hbase_master_jaas.conf org.jruby.Main /usr/lib/hbase/bin/region_mover.rb unload host1',
        logoutput = True,
        user = 'hbase',
    )
    self.assertNoMoreResources()
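
  # Shared assertion helpers used by the configure/start tests above: they walk
  # through every resource the "configure" flow creates, for unsecured and
  # Kerberos-secured clusters respectively.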
  def assert_configure_default(self):
    self.assertResourceCalled('Directory', '/etc/hbase',
        mode = 0755
    )
    self.assertResourceCalled('Directory', '/etc/hbase/conf',
        owner = 'hbase',
        group = 'hadoop',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/tmp',
        recursive = True,
        mode = 0777
    )
    self.assertResourceCalled('Directory', '/hadoop',
        recursive = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Execute', ('chmod', '1777', u'/hadoop'),
        sudo = True,
    )
    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
        owner = 'hbase',
        group = 'hadoop',
        conf_dir = '/etc/hbase/conf',
        configurations = self.getConfig()['configurations']['hbase-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site']
    )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
        owner = 'hbase',
        group = 'hadoop',
        conf_dir = '/etc/hbase/conf',
        configurations = self.getConfig()['configurations']['core-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hbase',
        group = 'hadoop',
        conf_dir = '/etc/hbase/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-policy.xml',
        owner = 'hbase',
        group = 'hadoop'
    )
    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
        owner = 'hbase',
        content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
        owner = 'root',
        group = 'root',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hbase.conf',
        content = Template('hbase.conf.j2'),
        owner = 'root',
        group = 'root',
        mode = 0644,
    )
    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
        owner = 'hbase',
        template_tag = 'GANGLIA-MASTER',
    )
    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
        owner = 'hbase',
        template_tag = None,
    )
    self.assertResourceCalled('Directory', '/var/run/hbase',
        owner = 'hbase',
        recursive = True,
        mode = 0755,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hbase',
        owner = 'hbase',
        recursive = True,
        mode = 0755,
        cd_access = 'a',
    )
    self.assertResourceCalled('File', '/etc/hbase/conf/log4j.properties',
        mode=0644,
        group='hadoop',
        owner='hbase',
        content='log4jproperties\nline2'
    )
    self.assertResourceCalled('HdfsResource', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        hadoop_bin_dir = '/usr/bin',
        keytab = UnknownConfigurationMock(),
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        dfs_type = '',
        owner = 'hbase',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hdfs_site=self.getConfig()['configurations']['hdfs-site'],
        principal_name=UnknownConfigurationMock(),
        default_fs='hdfs://c6401.ambari.apache.org:8020',
    )
    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        hadoop_conf_dir = '/etc/hadoop/conf',
        keytab = UnknownConfigurationMock(),
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        dfs_type = '',
        owner = 'hbase',
        hadoop_bin_dir = '/usr/bin',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hdfs_site=self.getConfig()['configurations']['hdfs-site'],
        principal_name=UnknownConfigurationMock(),
        default_fs='hdfs://c6401.ambari.apache.org:8020',
        mode = 0711,
    )
    self.assertResourceCalled('HdfsResource', None,
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        hadoop_bin_dir = '/usr/bin',
        keytab = UnknownConfigurationMock(),
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hdfs_site=self.getConfig()['configurations']['hdfs-site'],
        principal_name=UnknownConfigurationMock(),
        default_fs='hdfs://c6401.ambari.apache.org:8020',
        hadoop_conf_dir = '/etc/hadoop/conf',
    )

  def assert_configure_secured(self):
    self.assertResourceCalled('Directory', '/etc/hbase',
        mode = 0755
    )
    self.assertResourceCalled('Directory', '/etc/hbase/conf',
        owner = 'hbase',
        group = 'hadoop',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/tmp',
        recursive = True,
        mode = 0777
    )
    self.assertResourceCalled('Directory', '/hadoop',
        recursive = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Execute', ('chmod', '1777', u'/hadoop'),
        sudo = True,
    )
    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
        owner = 'hbase',
        group = 'hadoop',
        conf_dir = '/etc/hbase/conf',
        configurations = self.getConfig()['configurations']['hbase-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site']
    )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
        owner = 'hbase',
        group = 'hadoop',
        conf_dir = '/etc/hbase/conf',
        configurations = self.getConfig()['configurations']['core-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hbase',
        group = 'hadoop',
        conf_dir = '/etc/hbase/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-policy.xml',
        owner = 'hbase',
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
        owner = 'hbase',
        content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
        owner = 'root',
        group = 'root',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hbase.conf',
        content = Template('hbase.conf.j2'),
        owner = 'root',
        group = 'root',
        mode = 0644,
    )
    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
        owner = 'hbase',
        template_tag = 'GANGLIA-MASTER',
    )
    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
        owner = 'hbase',
        template_tag = None,
    )
    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase_master_jaas.conf',
        owner = 'hbase',
        template_tag = None,
    )
    self.assertResourceCalled('Directory', '/var/run/hbase',
        owner = 'hbase',
        recursive = True,
        mode = 0755,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hbase',
        owner = 'hbase',
        recursive = True,
        mode = 0755,
        cd_access = 'a',
    )
    self.assertResourceCalled('File', '/etc/hbase/conf/log4j.properties',
        mode=0644,
        group='hadoop',
        owner='hbase',
        content='log4jproperties\nline2'
    )
    self.assertResourceCalled('HdfsResource', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = True,
        hadoop_bin_dir = '/usr/bin',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        dfs_type = '',
        owner = 'hbase',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hdfs_site=self.getConfig()['configurations']['hdfs-site'],
        principal_name='hdfs',
        default_fs='hdfs://c6401.ambari.apache.org:8020',
    )
    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = True,
        hadoop_conf_dir = '/etc/hadoop/conf',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        dfs_type = '',
        owner = 'hbase',
        hadoop_bin_dir = '/usr/bin',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hdfs_site=self.getConfig()['configurations']['hdfs-site'],
        principal_name='hdfs',
        default_fs='hdfs://c6401.ambari.apache.org:8020',
        mode = 0711,
    )
    self.assertResourceCalled('HdfsResource', None,
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = True,
        hadoop_bin_dir = '/usr/bin',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hdfs_site=self.getConfig()['configurations']['hdfs-site'],
        principal_name='hdfs',
        default_fs='hdfs://c6401.ambari.apache.org:8020',
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
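
  # HDP 2.2 layout: the same start flow as above, but with hdp-select style
  # paths under /usr/hdp/current instead of /usr/lib/hbase and /etc/hbase/conf.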
  def test_start_default_22(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "start",
        config_file="hbase-2.2.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Directory', '/etc/hbase',
        mode = 0755)
    self.assertResourceCalled('Directory', '/usr/hdp/current/hbase-master/conf',
        owner = 'hbase',
        group = 'hadoop',
        recursive = True)
    self.assertResourceCalled('Directory', '/tmp',
        recursive = True,
        mode = 0777
    )
    self.assertResourceCalled('Directory', '/hadoop',
        recursive = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Execute', ('chmod', '1777', u'/hadoop'),
        sudo = True,
    )
    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
        owner = 'hbase',
        group = 'hadoop',
        conf_dir = '/usr/hdp/current/hbase-master/conf',
        configurations = self.getConfig()['configurations']['hbase-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site'])
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
        owner = 'hbase',
        group = 'hadoop',
        conf_dir = '/usr/hdp/current/hbase-master/conf',
        configurations = self.getConfig()['configurations']['core-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['core-site'])
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hbase',
        group = 'hadoop',
        conf_dir = '/usr/hdp/current/hbase-master/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site'])
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/usr/hdp/current/hadoop-client/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site'])
    self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
        owner = 'hbase',
        group = 'hadoop',
        conf_dir = '/usr/hdp/current/hbase-master/conf',
        configurations = self.getConfig()['configurations']['hbase-policy'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hbase-policy'])
    self.assertResourceCalled('File', '/usr/hdp/current/hbase-master/conf/hbase-env.sh',
        owner = 'hbase',
        content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']),
        group = 'hadoop'
    )
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
        owner = 'root',
        group = 'root',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hbase.conf',
        content = Template('hbase.conf.j2'),
        owner = 'root',
        group = 'root',
        mode = 0644,
    )
    self.assertResourceCalled('TemplateConfig', '/usr/hdp/current/hbase-master/conf/hadoop-metrics2-hbase.properties',
        owner = 'hbase',
        template_tag = 'GANGLIA-MASTER')
    self.assertResourceCalled('TemplateConfig', '/usr/hdp/current/hbase-master/conf/regionservers',
        owner = 'hbase',
        template_tag = None)
    self.assertResourceCalled('Directory', '/var/run/hbase',
        owner = 'hbase',
        recursive = True,
        mode = 0755,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hbase',
        owner = 'hbase',
        recursive = True,
        mode = 0755,
        cd_access = 'a',
    )
    self.assertResourceCalled('File', '/usr/hdp/current/hbase-master/conf/log4j.properties',
        mode=0644,
        group='hadoop',
        owner='hbase',
        content='log4jproperties\nline2')
    self.assertResourceCalled('HdfsResource', 'hdfs://nn1/apps/hbase/data',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
        keytab = UnknownConfigurationMock(),
        default_fs = 'hdfs://nn1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = UnknownConfigurationMock(),
        user = 'hdfs',
        dfs_type = '',
        owner = 'hbase',
        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
    )
    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
        keytab = UnknownConfigurationMock(),
        default_fs = 'hdfs://nn1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = UnknownConfigurationMock(),
        user = 'hdfs',
        dfs_type = '',
        owner = 'hbase',
        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0711,
    )
    self.assertResourceCalled('HdfsResource', None,
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
        keytab = UnknownConfigurationMock(),
        default_fs = 'hdfs://nn1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = UnknownConfigurationMock(),
        user = 'hdfs',
        action = ['execute'],
        hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
        dfs_type = ''
    )
    self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-master/bin/hbase-daemon.sh --config /usr/hdp/current/hbase-master/conf start master',
        not_if = 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hbase/hbase-hbase-master.pid && ps -p `ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
        user = 'hbase')
    self.assertNoMoreResources()
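
  # security_status runs entirely against mocked helpers from
  # resource_management.libraries.functions.security_commons, so no resources
  # are asserted here; only the mock interactions and structured output are
  # verified.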
  @patch("resource_management.libraries.functions.security_commons.build_expectations")
  @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
  @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
  @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
  @patch("resource_management.libraries.script.Script.put_structured_out")
  def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
    # Test that the function works when it is called with correct parameters
    security_params = {
      'hbase-site': {
        'hbase.master.kerberos.principal': '/path/to/hbase_keytab',
        'hbase.master.keytab.file': 'hbase_principal'
      }
    }
    result_issues = []
    props_value_check = {"hbase.security.authentication": "kerberos",
        "hbase.security.authorization": "true"}
    props_empty_check = ["hbase.master.keytab.file",
        "hbase.master.kerberos.principal"]
    props_read_check = ["hbase.master.keytab.file"]

    get_params_mock.return_value = security_params
    validate_security_config_mock.return_value = result_issues

    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "security_status",
        config_file="secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )

    build_exp_mock.assert_called_with('hbase-site', props_value_check, props_empty_check, props_read_check)
    put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
    cached_kinit_executor_mock.called_with('/usr/bin/kinit',
        self.config_dict['configurations']['hbase-env']['hbase_user'],
        security_params['hbase-site']['hbase.master.keytab.file'],
        security_params['hbase-site']['hbase.master.kerberos.principal'],
        self.config_dict['hostname'],
        '/tmp')

    # Test that the exception thrown by cached_kinit_executor is caught
    cached_kinit_executor_mock.reset_mock()
    cached_kinit_executor_mock.side_effect = Exception("Invalid command")

    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
          classname = "HbaseMaster",
          command = "security_status",
          config_file="secured.json",
          hdp_stack_version = self.STACK_VERSION,
          target = RMFTestCase.TARGET_COMMON_SERVICES
      )
    except:
      self.assertTrue(True)

    # Test with a security_params that doesn't contain hbase-site
    empty_security_params = {}
    cached_kinit_executor_mock.reset_mock()
    get_params_mock.reset_mock()
    put_structured_out_mock.reset_mock()
    get_params_mock.return_value = empty_security_params

    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "security_status",
        config_file="secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})

    # Test with a non-empty result_issues
    result_issues_with_params = {}
    result_issues_with_params['hbase-site'] = "Something bad happened"

    validate_security_config_mock.reset_mock()
    get_params_mock.reset_mock()
    validate_security_config_mock.return_value = result_issues_with_params
    get_params_mock.return_value = security_params

    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "security_status",
        config_file="default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})

    # Test with security_enabled = false
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "security_status",
        config_file="secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
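
  # Upgrade backup: the pre-upgrade snapshot is taken by piping 'snapshot_all'
  # into the hbase shell as the hbase user.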
  def test_upgrade_backup(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_upgrade.py",
        classname = "HbaseMasterUpgrade",
        command = "take_snapshot",
        config_file="hbase-preupgrade.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)

    self.assertResourceCalled('Execute', " echo 'snapshot_all' | /usr/hdp/current/hbase-client/bin/hbase shell",
        user = 'hbase')

    self.assertNoMoreResources()
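
  # pre_upgrade_restart: the master is repointed to the new stack version via
  # hdp-select; for HDP 2.3 the conf-select calls are checked as well.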
  @patch("resource_management.core.shell.call")
  def test_pre_upgrade_restart(self, call_mock):
    call_mock.side_effects = [(0, None), (0, None)]
    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.2.1.0-3242'
    json_content['commandParams']['version'] = version

    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "pre_upgrade_restart",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        mocks_dict = mocks_dict)

    self.assertResourceCalled('Execute',
        ('hdp-select', 'set', 'hbase-master', version), sudo=True,)
    self.assertFalse(call_mock.called)
    self.assertNoMoreResources()

  @patch("resource_management.core.shell.call")
  def test_upgrade_23(self, call_mock):
    call_mock.side_effects = [(0, None), (0, None)]
    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.3.0.0-1234'
    json_content['commandParams']['version'] = version

    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
        classname = "HbaseMaster",
        command = "pre_upgrade_restart",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],
        mocks_dict = mocks_dict)

    self.assertResourceCalledIgnoreEarlier('Execute', ('hdp-select', 'set', 'hbase-master', version), sudo=True)

    self.assertEquals(1, mocks_dict['call'].call_count)
    self.assertEquals(3, mocks_dict['checked_call'].call_count)
    self.assertEquals(
        ('conf-select', 'set-conf-dir', '--package', 'hbase', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
        mocks_dict['checked_call'].call_args_list[1][0][0])
    self.assertEquals(
        ('conf-select', 'create-conf-dir', '--package', 'hbase', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
        mocks_dict['call'].call_args_list[0][0][0])