#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import json
import os
import tempfile
from stacks.utils.RMFTestCase import *
from mock.mock import MagicMock, patch
import resource_management
from resource_management.core import shell
from resource_management.core.exceptions import Fail
from ambari_commons import OSCheck


class TestNamenode(RMFTestCase):
  COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
  STACK_VERSION = "2.0.6"

  def test_configure_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "configure",
        config_file = "default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertNoMoreResources()
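
  # "start" with a non-HDFS default filesystem alongside HDFS
  # (altfs_plus_hdfs.json). call_mocks stubs the underlying shell.call();
  # the (5, "") tuple presumably simulates a non-zero exit from a pre-start
  # check so that the format-and-start path below is exercised.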
  def test_start_default_alt_fs(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "altfs_plus_hdfs.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(5, "")],
    )
    self.assert_configure_default()
    self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
    self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
        path = ['/usr/bin'],
        user = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', 'hdfs --config /etc/hadoop/conf dfsadmin -fs hdfs://c6405.ambari.apache.org:8020 -safemode leave',
        path = ['/usr/bin'],
        tries = 10,
        try_sleep = 10,
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', "hadoop dfsadmin -fs hdfs://c6405.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        path = ['/usr/bin'],
        tries = 40,
        only_if = None,
        user = 'hdfs',
        try_sleep = 10,
    )
    self.assertResourceCalled('HdfsDirectory', '/tmp',
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0777,
        owner = 'hdfs',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0770,
        owner = 'ambari-qa',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        action = ['create'],
        bin_dir = '/usr/bin',
        only_if = None,
    )
    self.assertNoMoreResources()
    pass

  def test_install_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "install",
        config_file = "default_no_install.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        try_install = True
    )
    self.assert_configure_default()
    self.assertNoMoreResources()
    pass
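
  # Plain "start" against default.json: format the empty NameNode dir,
  # launch the daemon, leave safemode, then create the bootstrap HDFS
  # directories (/tmp, /user/ambari-qa).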
  def test_start_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(5, "")],
    )
    self.assert_configure_default()
    self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
    self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
        path = ['/usr/bin'],
        user = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', 'hdfs --config /etc/hadoop/conf dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode leave',
        path = ['/usr/bin'],
        tries = 10,
        try_sleep = 10,
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', "hadoop dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        path = ['/usr/bin'],
        tries = 40,
        only_if = None,
        user = 'hdfs',
        try_sleep = 10,
    )
    self.assertResourceCalled('HdfsDirectory', '/tmp',
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0777,
        owner = 'hdfs',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0770,
        owner = 'ambari-qa',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        action = ['create'],
        bin_dir = '/usr/bin',
        only_if = None,
    )
    self.assertNoMoreResources()
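
  # "stop" deletes a stale pid file (the not_if guard keeps a live process
  # from being clobbered), stops the daemon, then removes the pid file.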
  def test_stop_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "stop",
        config_file = "default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
    )
    self.assertNoMoreResources()

  def test_configure_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "configure",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertNoMoreResources()

  def test_start_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(5, "")],
    )
    self.assert_configure_secured()
    self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
    self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
        path = ['/usr/bin'],
        user = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', 'hdfs --config /etc/hadoop/conf dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode leave',
        path = ['/usr/bin'],
        tries = 10,
        try_sleep = 10,
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', "hadoop dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        path = ['/usr/bin'],
        tries = 40,
        only_if = None,
        user = 'hdfs',
        try_sleep = 10,
    )
    self.assertResourceCalled('HdfsDirectory', '/tmp',
        security_enabled = True,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0777,
        owner = 'hdfs',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
        security_enabled = True,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0770,
        owner = 'ambari-qa',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
        security_enabled = True,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        action = ['create'],
        bin_dir = '/usr/bin',
        only_if = None,
    )
    self.assertNoMoreResources()

  def test_stop_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "stop",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
    )
    self.assertNoMoreResources()
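
  # HA "start": unlike the non-HA cases there is no format step, and the
  # safemode check plus directory creation are gated by an only_if that
  # asks haadmin whether this NameNode (nn1) is the active one.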
  def test_start_ha_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "ha_default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', "hadoop dfsadmin -fs hdfs://ns1 -safemode get | grep 'Safe mode is OFF'",
        path = ['/usr/bin'],
        tries = 40,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        user = 'hdfs',
        try_sleep = 10,
    )
    self.assertResourceCalled('HdfsDirectory', '/tmp',
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0777,
        owner = 'hdfs',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0770,
        owner = 'ambari-qa',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        action = ['create'],
        bin_dir = '/usr/bin',
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
    )
    self.assertNoMoreResources()

  def test_start_ha_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "ha_secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', "hadoop dfsadmin -fs hdfs://ns1 -safemode get | grep 'Safe mode is OFF'",
        path = ['/usr/bin'],
        tries = 40,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        user = 'hdfs',
        try_sleep = 10,
    )
    self.assertResourceCalled('HdfsDirectory', '/tmp',
        security_enabled = True,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0777,
        owner = 'hdfs',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
        security_enabled = True,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0770,
        owner = 'ambari-qa',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
        security_enabled = True,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        action = ['create'],
        bin_dir = '/usr/bin',
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
    )
    self.assertNoMoreResources()

  # tests the namenode start command when NameNode HA is enabled and the
  # HA cluster is being started initially, rather than through the UI Wizard
  def test_start_ha_bootstrap_active_from_blueprint(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "ha_bootstrap_active_node.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    # verify that the active namenode was formatted
    self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
    self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
        path = ['/usr/bin'],
        user = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', "hadoop dfsadmin -fs hdfs://ns1 -safemode get | grep 'Safe mode is OFF'",
        path = ['/usr/bin'],
        tries = 40,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        user = 'hdfs',
        try_sleep = 10,
    )
    self.assertResourceCalled('HdfsDirectory', '/tmp',
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0777,
        owner = 'hdfs',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0770,
        owner = 'ambari-qa',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        action = ['create'],
        bin_dir = '/usr/bin',
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
    )
    self.assertNoMoreResources()

  # tests the namenode start command when NameNode HA is enabled and the
  # HA cluster is being started initially, rather than through the UI Wizard;
  # this test verifies the startup of a "standby" namenode
  @patch.object(shell, "call", new=MagicMock(return_value=(5, "")))
  def test_start_ha_bootstrap_standby_from_blueprint(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "ha_bootstrap_standby_node.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    # TODO: shell.call(), used to bootstrap the standby, is patched to return
    # status code '5' (i.e. already bootstrapped). The test case needs to be
    # updated to verify that the standby case is detected and that the
    # bootstrap command runs before the namenode launches.
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', "hadoop dfsadmin -fs hdfs://ns1 -safemode get | grep 'Safe mode is OFF'",
        path = ['/usr/bin'],
        tries = 40,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
        user = 'hdfs',
        try_sleep = 10,
    )
    self.assertResourceCalled('HdfsDirectory', '/tmp',
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0777,
        owner = 'hdfs',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        mode = 0770,
        owner = 'ambari-qa',
        bin_dir = '/usr/bin',
        action = ['create_delayed'],
    )
    self.assertResourceCalled('HdfsDirectory', None,
        security_enabled = False,
        keytab = UnknownConfigurationMock(),
        conf_dir = '/etc/hadoop/conf',
        hdfs_user = 'hdfs',
        kinit_path_local = '/usr/bin/kinit',
        action = ['create'],
        bin_dir = '/usr/bin',
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
    )
    self.assertNoMoreResources()
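
  # Decommission rewrites /etc/hadoop/conf/dfs.exclude from the
  # exclude_hosts_list.j2 template and runs dfsadmin -refreshNodes so the
  # NameNode picks up the new exclude list.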
  def test_decommission_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "decommission",
        config_file = "default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Execute', '', user = 'hdfs')
    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -refreshNodes',
        user = 'hdfs',
        conf_dir = '/etc/hadoop/conf',
        bin_dir = '/usr/bin',
        kinit_override = True)
    self.assertNoMoreResources()

  def test_decommission_update_exclude_file_only(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "decommission",
        config_file = "default_update_exclude_file_only.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertNoMoreResources()

  def test_decommission_ha_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "decommission",
        config_file = "ha_default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Execute', '', user = 'hdfs')
    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
        user = 'hdfs',
        conf_dir = '/etc/hadoop/conf',
        bin_dir = '/usr/bin',
        kinit_override = True)
    self.assertNoMoreResources()

  def test_decommission_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "decommission",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM;',
        user = 'hdfs',
    )
    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -refreshNodes',
        bin_dir = '/usr/bin',
        conf_dir = '/etc/hadoop/conf',
        kinit_override = True,
        user = 'hdfs',
    )
    self.assertNoMoreResources()
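
  # The assert_configure_* helpers below verify the resource sequence shared
  # by every configure/start command: limits.d entries, hdfs-site/core-site
  # XML configs, the slaves file, and the NameNode data directory.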
  def assert_configure_default(self):
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
        owner = 'root',
        group = 'root',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
        content = Template('hdfs.conf.j2'),
        owner = 'root',
        group = 'root',
        mode = 0644,
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['core-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
        mode = 0644
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
        content = Template('slaves.j2'),
        owner = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
        owner = 'hdfs',
        group = 'hadoop',
        recursive = True,
        mode = 0755,
        cd_access = 'a'
    )

  def assert_configure_secured(self):
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
        owner = 'root',
        group = 'root',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
        content = Template('hdfs.conf.j2'),
        owner = 'root',
        group = 'root',
        mode = 0644,
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['core-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
        mode = 0644
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
        content = Template('slaves.j2'),
        owner = 'root',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
        owner = 'hdfs',
        group = 'hadoop',
        recursive = True,
        mode = 0755,
        cd_access = 'a'
    )
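
  # rebalancehdfs runs the balancer as the hdfs user and streams its output
  # through handle_new_line; put_structured_out is patched since progress is
  # reported as structured output.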
  @patch("resource_management.libraries.script.Script.put_structured_out")
  def test_rebalance_hdfs(self, pso):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "rebalancehdfs",
        config_file = "rebalancehdfs_default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf balancer -threshold -1'",
        logoutput = False,
        on_new_line = FunctionMock('handle_new_line'),
    )
    self.assertNoMoreResources()
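
  # Secured rebalance kinits into a dedicated credential cache under the
  # system temp dir (the hash suffix is whatever value the product code
  # derives for this config) and hands it to the balancer via KRB5CCNAME.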
  @patch("resource_management.libraries.script.Script.put_structured_out")
  @patch("os.system")
  def test_rebalance_secured_hdfs(self, pso, system_mock):
    system_mock.return_value = -1
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "rebalancehdfs",
        config_file = "rebalancehdfs_secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    ccache_path = os.path.join(tempfile.gettempdir(), "hdfs_rebalance_cc_7add60ca651f1bd1ed909a6668937ba9")
    kinit_cmd = "/usr/bin/kinit -c {0} -kt /etc/security/keytabs/hdfs.headless.keytab hdfs@EXAMPLE.COM".format(ccache_path)
    rebalance_cmd = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin KRB5CCNAME={0} ; hdfs --config /etc/hadoop/conf balancer -threshold -1'".format(ccache_path)
    self.assertResourceCalled('Execute', kinit_cmd,
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', rebalance_cmd,
        logoutput = False,
        on_new_line = FunctionMock('handle_new_line'),
    )
    self.assertNoMoreResources()

  @patch("os.path.isfile")
  def test_ranger_installed_missing_file(self, isfile_mock):
    """
    Tests that when Ranger is enabled for HDFS, an exception is thrown
    if no install.properties is found.
    :return:
    """
    isfile_mock.return_value = False
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
          classname = "NameNode", command = "start", config_file = "ranger-namenode-start.json",
          hdp_stack_version = self.STACK_VERSION, target = RMFTestCase.TARGET_COMMON_SERVICES)
      self.fail("Expected a failure since the ranger install.properties was missing")
    except Fail, failure:
      pass
    self.assertTrue(isfile_mock.called)

  @patch("resource_management.libraries.functions.security_commons.build_expectations")
  @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
  @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
  @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
  @patch("resource_management.libraries.script.Script.put_structured_out")
  def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
    # Test that the function works when called with correct parameters
    security_params = {
      'core-site': {
        'hadoop.security.authentication': 'kerberos'
      },
      'hdfs-site': {
        'dfs.namenode.keytab.file': 'path/to/namenode/keytab/file',
        'dfs.namenode.kerberos.principal': 'namenode_principal'
      }
    }
    props_value_check = None
    props_empty_check = ['dfs.namenode.kerberos.internal.spnego.principal',
                         'dfs.namenode.keytab.file',
                         'dfs.namenode.kerberos.principal']
    props_read_check = ['dfs.namenode.keytab.file']
    result_issues = []
    get_params_mock.return_value = security_params
    validate_security_config_mock.return_value = result_issues
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "security_status",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    build_exp_mock.assert_called_with('hdfs-site', props_value_check, props_empty_check, props_read_check)
    put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
    cached_kinit_executor_mock.called_with('/usr/bin/kinit',
        self.config_dict['configurations']['hadoop-env']['hdfs_user'],
        security_params['hdfs-site']['dfs.namenode.keytab.file'],
        security_params['hdfs-site']['dfs.namenode.kerberos.principal'],
        self.config_dict['hostname'],
        '/tmp')

    # Testing when hadoop.security.authentication is simple
    security_params['core-site']['hadoop.security.authentication'] = 'simple'
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "security_status",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
    security_params['core-site']['hadoop.security.authentication'] = 'kerberos'

    # Testing that the exception thrown by cached_executor is caught
    cached_kinit_executor_mock.reset_mock()
    cached_kinit_executor_mock.side_effect = Exception("Invalid command")
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
          classname = "NameNode",
          command = "security_status",
          config_file = "secured.json",
          hdp_stack_version = self.STACK_VERSION,
          target = RMFTestCase.TARGET_COMMON_SERVICES
      )
    except:
      self.assertTrue(True)

    # Testing with a security_params which doesn't contain hdfs-site
    empty_security_params = {
      'core-site': {
        'hadoop.security.authentication': 'kerberos'
      }
    }
    cached_kinit_executor_mock.reset_mock()
    get_params_mock.reset_mock()
    put_structured_out_mock.reset_mock()
    get_params_mock.return_value = empty_security_params
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "security_status",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})

    # Testing with a non-empty result_issues
    result_issues_with_params = {
      'hdfs-site': "Something bad happened"
    }
    validate_security_config_mock.reset_mock()
    get_params_mock.reset_mock()
    validate_security_config_mock.return_value = result_issues_with_params
    get_params_mock.return_value = security_params
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "security_status",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})

  def test_upgrade_restart(self):
    # Execution of nn_ru_lzo invokes a code path that triggers lzo
    # installation, which was failing in the RU case. See hdfs.py and the
    # lzo_enabled check in it. Just executing the script is enough to test
    # the fix.
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "restart",
        config_file = "nn_ru_lzo.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)

  def test_pre_rolling_restart(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.2.1.0-3242'
    json_content['commandParams']['version'] = version
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "pre_rolling_restart",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute',
        'hdp-select set hadoop-hdfs-namenode %s' % version)
    self.assertNoMoreResources()

  @patch("resource_management.core.shell.call")
  def test_pre_rolling_restart_23(self, call_mock):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.3.0.0-1234'
    json_content['commandParams']['version'] = version
    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "pre_rolling_restart",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, None), (0, None)],
        mocks_dict = mocks_dict)
    self.assertResourceCalled('Execute', 'hdp-select set hadoop-hdfs-namenode %s' % version)
    self.assertNoMoreResources()
    self.assertEquals(2, mocks_dict['call'].call_count)
    self.assertEquals(
        "conf-select create-conf-dir --package hadoop --stack-version 2.3.0.0-1234 --conf-version 0",
        mocks_dict['call'].call_args_list[0][0][0])
    self.assertEquals(
        "conf-select set-conf-dir --package hadoop --stack-version 2.3.0.0-1234 --conf-version 0",
        mocks_dict['call'].call_args_list[1][0][0])

  def test_post_rolling_restart(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "post_rolling_restart",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -report -live',
        user = 'hdfs',
    )
    self.assertNoMoreResources()
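
  # prepare_rolling_upgrade requires the NameNode to be out of safemode;
  # call_mocks supplies the "Safe mode is OFF" reply for that check before
  # the -rollingUpgrade prepare/query commands are asserted.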
  def test_prepare_rolling_upgrade__upgrade(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/secured.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    json_content['commandParams']['upgrade_direction'] = 'upgrade'
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "prepare_rolling_upgrade",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, "Safe mode is OFF in c6401.ambari.apache.org")])
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',)
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade prepare',
        logoutput = True,
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade query',
        logoutput = True,
        user = 'hdfs',
    )
    self.assertNoMoreResources()

  @patch.object(shell, "call")
  def test_prepare_rolling_upgrade__downgrade(self, shell_call_mock):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/secured.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    json_content['commandParams']['upgrade_direction'] = 'downgrade'
    # Mock the safemode_check call
    shell_call_mock.return_value = 0, "Safe mode is OFF in c6401.ambari.apache.org"
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "prepare_rolling_upgrade",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',)
    self.assertNoMoreResources()

  def test_finalize_rolling_upgrade(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "finalize_rolling_upgrade",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade query',
        logoutput = True,
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade finalize',
        logoutput = True,
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade query',
        logoutput = True,
        user = 'hdfs',
    )
    self.assertNoMoreResources()
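
  # The three *_params tests below pin down the hadoop directory layout that
  # params resolves per stack line: /usr/lib/hadoop for pre-2.2 stacks,
  # /usr/hdp/current for 2.2, and versioned /usr/hdp/<version> dirs for 2.3.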
  @patch("resource_management.core.shell.call")
  def test_pre_rolling_restart_21_and_lower_params(self, call_mock):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    json_content['hostLevelParams']['stack_name'] = 'HDP'
    json_content['hostLevelParams']['stack_version'] = '2.0'
    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "pre_rolling_restart",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
        mocks_dict = mocks_dict)
    import sys
    self.assertEquals("/etc/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
    self.assertEquals("/usr/lib/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
    self.assertEquals("/usr/bin", sys.modules["params"].hadoop_bin_dir)
    self.assertEquals("/usr/lib/hadoop/sbin", sys.modules["params"].hadoop_bin)

  @patch("resource_management.core.shell.call")
  def test_pre_rolling_restart_22_params(self, call_mock):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.2.0.0-1234'
    del json_content['commandParams']['version']
    json_content['hostLevelParams']['stack_name'] = 'HDP'
    json_content['hostLevelParams']['stack_version'] = '2.2'
    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "pre_rolling_restart",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
        mocks_dict = mocks_dict)
    import sys
    self.assertEquals("/usr/hdp/current/hadoop-client/conf", sys.modules["params"].hadoop_conf_dir)
    self.assertEquals("/usr/hdp/current/hadoop-client/libexec", sys.modules["params"].hadoop_libexec_dir)
    self.assertEquals("/usr/hdp/current/hadoop-client/bin", sys.modules["params"].hadoop_bin_dir)
    self.assertEquals("/usr/hdp/current/hadoop-client/sbin", sys.modules["params"].hadoop_bin)

  @patch("resource_management.core.shell.call")
  def test_pre_rolling_restart_23_params(self, call_mock):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.3.0.0-1234'
    json_content['commandParams']['version'] = version
    json_content['commandParams']['upgrade_direction'] = 'upgrade'
    json_content['hostLevelParams']['stack_name'] = 'HDP'
    json_content['hostLevelParams']['stack_version'] = '2.3'
    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "pre_rolling_restart",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
        mocks_dict = mocks_dict)
    import sys
    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/bin", sys.modules["params"].hadoop_bin_dir)
    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/sbin", sys.modules["params"].hadoop_bin)
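

# Presumably patched in place of subprocess.Popen by the balancer tests
# (usage is outside this excerpt): each call returns a mock process whose
# stdout replays canned balancer progress lines and whose returncode is
# Popen_Mock.return_value.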
class Popen_Mock:
  return_value = 1
  lines = ['Time Stamp Iteration# Bytes Already Moved Bytes Left To Move Bytes Being Moved\n',
           'Jul 28, 2014 5:01:49 PM 0 0 B 5.74 GB 9.79 GB\n',
           'Jul 28, 2014 5:03:00 PM 1 0 B 5.58 GB 9.79 GB\n',
           '']

  def __call__(self, *args, **kwargs):
    popen = MagicMock()
    popen.returncode = Popen_Mock.return_value
    popen.stdout.readline = MagicMock(side_effect = Popen_Mock.lines)
    return popen