#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from ambari_commons import OSCheck
import json
import os
import tempfile
from stacks.utils.RMFTestCase import *
from mock.mock import MagicMock, patch, call
import resource_management
from resource_management.core import shell
from resource_management.core.exceptions import Fail
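
# Unit tests for the HDFS NameNode service script (scripts/namenode.py).
# Each test replays one Ambari command (configure, start, stop, decommission,
# rebalancehdfs, security_status, ...) against a recorded JSON configuration
# and asserts the exact sequence of resources the script declares.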
class TestNamenode(RMFTestCase):
  COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
  STACK_VERSION = "2.0.6"

  def test_configure_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "configure",
        config_file = "default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertNoMoreResources()
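
  # Start when the cluster's default FS is not HDFS (a wasb:// URL here): the
  # local HDFS NameNode should still be formatted and started, and the
  # HdfsResource calls should target the WASB default FS.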
  def test_start_default_alt_fs(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "altfs_plus_hdfs.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, "")],
    )
    self.assert_configure_default()
    self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
    self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
        path = ['/usr/bin'],
        user = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
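    # [RMF_ENV_PLACEHOLDER] and [RMF_EXPORT_PLACEHOLDER] are tokens that the
    # resource_management shell wrappers appear to substitute at run time;
    # the assertions here match them literally.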
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6405.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        tries = 180,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        security_enabled = False,
        only_if = True,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'wasb://abc@c6401.ambari.apache.org',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        owner = 'hdfs',
        dfs_type = '',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        security_enabled = False,
        only_if = True,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'wasb://abc@c6401.ambari.apache.org',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = False,
        only_if = True,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'wasb://abc@c6401.ambari.apache.org',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()

  def test_install_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "install",
        config_file = "default_no_install.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        try_install = True
    )
    self.assert_configure_default()
    self.assertNoMoreResources()
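
  # Happy-path start against a pure-HDFS default FS: format if the name dir is
  # empty, start the daemon, wait until safemode is OFF, then create /tmp and
  # /user/ambari-qa through HdfsResource.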
  def test_start_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, "")],
    )
    self.assert_configure_default()
    self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
    self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
        path = ['/usr/bin'],
        user = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        tries = 180,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        security_enabled = False,
        only_if = True,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        owner = 'hdfs',
        dfs_type = '',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        security_enabled = False,
        only_if = True,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        owner = 'ambari-qa',
        dfs_type = '',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = False,
        only_if = True,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()

  def test_stop_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "stop",
        config_file = "default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
    )
    self.assertNoMoreResources()

  def test_configure_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "configure",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertNoMoreResources()
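
  # Secured start follows the same flow plus a kinit with the hdfs headless
  # keytab before the safemode check; the HdfsResource calls carry the keytab
  # and principal.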
  def test_start_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, "")],
    )
    self.assert_configure_secured()
    self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
    self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
        path = ['/usr/bin'],
        user = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        tries = 180,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        security_enabled = True,
        hadoop_bin_dir = '/usr/bin',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        owner = 'hdfs',
        dfs_type = '',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        principal_name = 'hdfs',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        mode = 0777,
        only_if = True
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        security_enabled = True,
        hadoop_bin_dir = '/usr/bin',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        principal_name = 'hdfs',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        mode = 0770,
        only_if = True
    )
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = True,
        only_if = True,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        hadoop_bin_dir = '/usr/bin',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        principal_name = 'hdfs',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()

  def test_stop_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "stop",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
    )
    self.assertNoMoreResources()
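
  # HA start: the NameNode is not formatted here; the HdfsResource calls are
  # guarded by an 'haadmin -getServiceState | grep active' only_if, so
  # directories are created only from the active NameNode.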
  def test_start_ha_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "ha_default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://ns1 -safemode get | grep 'Safe mode is OFF'",
        tries = 180,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'hdfs',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()

  def test_start_ha_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "ha_secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://ns1 -safemode get | grep 'Safe mode is OFF'",
        tries = 180,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        security_enabled = True,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = 'hdfs',
        user = 'hdfs',
        dfs_type = '',
        owner = 'hdfs',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        security_enabled = True,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = 'hdfs',
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = True,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = 'hdfs',
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()

  # Tests the namenode start command when NameNode HA is enabled and the HA
  # cluster is being bootstrapped initially (from a Blueprint) rather than
  # through the UI wizard.
  def test_start_ha_bootstrap_active_from_blueprint(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "ha_bootstrap_active_node.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    # verify that the active namenode was formatted
    self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
    self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
        path = ['/usr/bin'],
        user = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://ns1 -safemode get | grep 'Safe mode is OFF'",
        tries = 180,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'hdfs',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()

  # Tests the namenode start command when NameNode HA is enabled and the HA
  # cluster is being bootstrapped initially (from a Blueprint) rather than
  # through the UI wizard; this variant verifies startup of a "standby"
  # namenode.
  @patch.object(shell, "call")
  def test_start_ha_bootstrap_standby_from_blueprint(self, call_mocks):
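    # The mock injected by @patch.object is deliberately replaced with a fresh
    # MagicMock and handed to executeScript via call_mocks, so the test owns
    # the return values that shell.call produces.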
    call_mocks = MagicMock(return_value=(0, ""))
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "ha_bootstrap_standby_node.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = call_mocks
    )
    self.assert_configure_default()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    # TODO: shell.call() is used to bootstrap the standby and is patched here
    # to return a successful status (the real call returns '5' when the standby
    # is already bootstrapped). The test case should be extended to verify that
    # the standby case is detected and that the bootstrap command runs before
    # the namenode launches.
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://ns1 -safemode get | grep 'Safe mode is OFF'",
        tries = 180,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'hdfs',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()
    self.assertTrue(call_mocks.called)
    self.assertEqual(2, call_mocks.call_count)
    calls = [
        call('hdfs namenode -bootstrapStandby -nonInteractive', logoutput=False, user=u'hdfs'),
        call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'", logoutput=True)]
    call_mocks.assert_has_calls(calls, any_order=False)

  # Tests the namenode start command when NameNode HA is enabled and the HA
  # cluster is being bootstrapped initially (from a Blueprint) rather than
  # through the UI wizard; this variant verifies the initial start of a
  # "standby" namenode, where the bootstrap must be retried.
  @patch.object(shell, "call")
  def test_start_ha_bootstrap_standby_from_blueprint_initial_start(self, call_mocks):
    call_mocks = MagicMock()
    call_mocks.side_effect = [(1, None), (0, None), (0, None)]
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "start",
        config_file = "ha_bootstrap_standby_node_initial_start.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = call_mocks
    )
    self.assert_configure_default()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    # TODO: shell.call() is used to bootstrap the standby; see the note in
    # test_start_ha_bootstrap_standby_from_blueprint above. The test case
    # should be extended to verify that the standby case is detected and that
    # the bootstrap command runs before the namenode launches.
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        recursive = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://ns1 -safemode get | grep 'Safe mode is OFF'",
        tries = 180,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'hdfs',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()
    self.assertTrue(call_mocks.called)
    self.assertEqual(3, call_mocks.call_count)
    calls = [
        call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'", logoutput=True),
        call('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput=False, user=u'hdfs'),
        call('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput=False, user=u'hdfs')]
    call_mocks.assert_has_calls(calls, any_order=True)
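
  # Decommission rewrites /etc/hadoop/conf/dfs.exclude and then refreshes the
  # NameNode's node list; in unsecured mode the kinit step degenerates to an
  # empty Execute('').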
  def test_decommission_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "decommission",
        config_file = "default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Execute', '', user = 'hdfs')
    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
        user = 'hdfs',
        conf_dir = '/etc/hadoop/conf',
        bin_dir = '/usr/bin',
        kinit_override = True)
    self.assertNoMoreResources()

  def test_decommission_update_exclude_file_only(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "decommission",
        config_file = "default_update_exclude_file_only.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertNoMoreResources()

  def test_decommission_ha_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "decommission",
        config_file = "ha_default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Execute', '', user = 'hdfs')
    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
        user = 'hdfs',
        conf_dir = '/etc/hadoop/conf',
        bin_dir = '/usr/bin',
        kinit_override = True)
    self.assertNoMoreResources()

  def test_decommission_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "decommission",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM;',
        user = 'hdfs',
    )
    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
        bin_dir = '/usr/bin',
        conf_dir = '/etc/hadoop/conf',
        kinit_override = True,
        user = 'hdfs',
    )
    self.assertNoMoreResources()
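
  # Shared assertions for the "configure" flow (unsecured): native libs,
  # limits, hdfs-site/core-site XML, the slaves file, and the namenode dir.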
  def assert_configure_default(self):
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
        recursive = True,
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
        to = '/usr/lib/hadoop/lib/libsnappy.so',
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
        to = '/usr/lib/hadoop/lib64/libsnappy.so',
    )
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
        owner = 'root',
        group = 'root',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
        content = Template('hdfs.conf.j2'),
        owner = 'root',
        group = 'root',
        mode = 0644,
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['core-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
        mode = 0644
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
        content = Template('slaves.j2'),
        owner = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
        owner = 'hdfs',
        group = 'hadoop',
        recursive = True,
        mode = 0755,
        cd_access = 'a'
    )

  def assert_configure_secured(self):
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
        recursive = True,
    )
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
        recursive = True,
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
        to = '/usr/lib/hadoop/lib/libsnappy.so',
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
        to = '/usr/lib/hadoop/lib64/libsnappy.so',
    )
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
        owner = 'root',
        group = 'root',
        recursive = True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
        content = Template('hdfs.conf.j2'),
        owner = 'root',
        group = 'root',
        mode = 0644,
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['core-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
        mode = 0644
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
        content = Template('slaves.j2'),
        owner = 'root',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
        owner = 'hdfs',
        group = 'hadoop',
        recursive = True,
        mode = 0755,
        cd_access = 'a'
    )
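
  # rebalancehdfs runs the HDFS balancer under the hdfs user and streams its
  # output through a line handler (FunctionMock('handle_new_line')).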
  @patch("resource_management.libraries.script.Script.put_structured_out")
  def test_rebalance_hdfs(self, pso):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "rebalancehdfs",
        config_file = "rebalancehdfs_default.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf balancer -threshold -1'",
        logoutput = False,
        on_new_line = FunctionMock('handle_new_line'),
    )
    self.assertNoMoreResources()

  @patch("resource_management.libraries.script.Script.put_structured_out")
  @patch("os.system")
  def test_rebalance_secured_hdfs(self, pso, system_mock):
    system_mock.return_value = -1
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "rebalancehdfs",
        config_file = "rebalancehdfs_secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(1, "no kinit")]
    )
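    # In secured mode the balancer gets its own Kerberos credential cache; the
    # hash suffix below is whatever the script derives for this particular
    # configuration, so the test pins the expected literal value.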
    tempdir = tempfile.gettempdir()
    ccache_path = os.path.join(tempfile.gettempdir(), "hdfs_rebalance_cc_7add60ca651f1bd1ed909a6668937ba9")
    kinit_cmd = "/usr/bin/kinit -c {0} -kt /etc/security/keytabs/hdfs.headless.keytab hdfs@EXAMPLE.COM".format(ccache_path)
    rebalance_cmd = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin KRB5CCNAME={0} ; hdfs --config /etc/hadoop/conf balancer -threshold -1'".format(ccache_path)
    self.assertResourceCalled('Execute', kinit_cmd,
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', rebalance_cmd,
        logoutput = False,
        on_new_line = FunctionMock('handle_new_line'),
    )
    self.assertNoMoreResources()

  @patch("os.path.isfile")
  def test_ranger_installed_missing_file(self, isfile_mock):
    """
    Tests that when Ranger is enabled for HDFS, an exception is thrown
    if no install.properties is found.
    :return:
    """
    isfile_mock.return_value = False
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
          classname = "NameNode", command = "start", config_file = "ranger-namenode-start.json",
          hdp_stack_version = self.STACK_VERSION, target = RMFTestCase.TARGET_COMMON_SERVICES)
      self.fail("Expected a failure since the ranger install.properties was missing")
    except Fail, failure:
      pass
    self.assertTrue(isfile_mock.called)
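
  # security_status drives the security_commons helpers: it validates the
  # expected Kerberos properties and reports a securityState through
  # put_structured_out.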

  @patch("resource_management.libraries.functions.security_commons.build_expectations")
  @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
  @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
  @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
  @patch("resource_management.libraries.script.Script.put_structured_out")
  def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
    # Test that the function works when called with correct parameters
    security_params = {
      'core-site': {
        'hadoop.security.authentication': 'kerberos'
      },
      'hdfs-site': {
        'dfs.namenode.keytab.file': 'path/to/namenode/keytab/file',
        'dfs.namenode.kerberos.principal': 'namenode_principal'
      }
    }
    props_value_check = None
    props_empty_check = ['dfs.namenode.kerberos.internal.spnego.principal',
                         'dfs.namenode.keytab.file',
                         'dfs.namenode.kerberos.principal']
    props_read_check = ['dfs.namenode.keytab.file']
    result_issues = []
    get_params_mock.return_value = security_params
    validate_security_config_mock.return_value = result_issues
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "security_status",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    build_exp_mock.assert_called_with('hdfs-site', props_value_check, props_empty_check, props_read_check)
    put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
    # assert_called_with, not the no-op Mock attribute access "called_with",
    # so this expectation is actually verified
    cached_kinit_executor_mock.assert_called_with('/usr/bin/kinit',
        self.config_dict['configurations']['hadoop-env']['hdfs_user'],
        security_params['hdfs-site']['dfs.namenode.keytab.file'],
        security_params['hdfs-site']['dfs.namenode.kerberos.principal'],
        self.config_dict['hostname'],
        '/tmp')
    # Test when hadoop.security.authentication is simple
    security_params['core-site']['hadoop.security.authentication'] = 'simple'
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "security_status",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
    security_params['core-site']['hadoop.security.authentication'] = 'kerberos'
    # Test that the exception thrown by cached_kinit_executor is caught
    cached_kinit_executor_mock.reset_mock()
    cached_kinit_executor_mock.side_effect = Exception("Invalid command")
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
          classname = "NameNode",
          command = "security_status",
          config_file = "secured.json",
          hdp_stack_version = self.STACK_VERSION,
          target = RMFTestCase.TARGET_COMMON_SERVICES
      )
    except Exception:
      # the exception raised by the mocked kinit executor is expected here
      pass
    # Test with a security_params that doesn't contain hdfs-site
    empty_security_params = {
      'core-site': {
        'hadoop.security.authentication': 'kerberos'
      }
    }
    cached_kinit_executor_mock.reset_mock()
    get_params_mock.reset_mock()
    put_structured_out_mock.reset_mock()
    get_params_mock.return_value = empty_security_params
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "security_status",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    # the expected message must match the production string verbatim
    put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})
    # Test with a non-empty result_issues
    result_issues_with_params = {
      'hdfs-site': "Something bad happened"
    }
    validate_security_config_mock.reset_mock()
    get_params_mock.reset_mock()
    validate_security_config_mock.return_value = result_issues_with_params
    get_params_mock.return_value = security_params
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "security_status",
        config_file = "secured.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
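
  # Outcomes exercised by test_security_status, as asserted via put_structured_out:
  #   kerberos auth, valid params, kinit succeeds -> {"securityState": "SECURED_KERBEROS"}
  #   simple auth                                 -> {"securityState": "UNSECURED"}
  #   kerberos auth, hdfs-site section missing    -> {"securityIssuesFound": ...}
  #   kerberos auth, validation reports issues    -> {"securityState": "UNSECURED"}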

  @patch("utils.get_namenode_states")
  def test_upgrade_restart(self, get_namenode_states_mock):
    # Executing with nn_ru_lzo exercises the code path that performs lzo
    # installation, which used to fail in the rolling-upgrade case. See hdfs.py
    # and the lzo_enabled check in it. Simply executing the script is enough to
    # test the fix.
    active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
    standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
    unknown_namenodes = []
    get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "restart",
        config_file = "nn_ru_lzo.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    # Repeat with the formerly active NameNode reported as unknown
    unknown_namenodes = active_namenodes
    active_namenodes = []
    get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "restart",
        config_file = "nn_ru_lzo.json",
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)

  def test_pre_upgrade_restart(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.2.1.0-3242'
    json_content['commandParams']['version'] = version
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "pre_upgrade_restart",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute',
        ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-namenode', version), sudo=True)
    self.assertNoMoreResources()

  @patch("resource_management.core.shell.call")
  def test_pre_upgrade_restart_23(self, call_mock):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.3.0.0-1234'
    json_content['commandParams']['version'] = version
    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "pre_upgrade_restart",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, None), (0, None)],
        mocks_dict = mocks_dict)
    self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-namenode', version), sudo=True)
    self.assertNoMoreResources()
    self.assertEquals(1, mocks_dict['call'].call_count)
    self.assertEquals(1, mocks_dict['checked_call'].call_count)
    self.assertEquals(
        ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
        mocks_dict['checked_call'].call_args_list[0][0][0])
    self.assertEquals(
        ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
        mocks_dict['call'].call_args_list[0][0][0])
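
  # In the HDP 2.3 path above, conf-select is expected to create the versioned
  # config directory (create-conf-dir, issued via call) and then point the
  # active config at it (set-conf-dir, issued via checked_call), before
  # hdp-select switches the hadoop-hdfs-namenode binaries to the new version.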

  def test_post_upgrade_restart(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "post_upgrade_restart",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -report -live',
        user = 'hdfs',
        tries = 60,
        try_sleep = 10
    )
    self.assertNoMoreResources()
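
  # tries = 60 with try_sleep = 10 gives the post-upgrade check up to roughly
  # 60 * 10 = 600 seconds for live DataNodes to appear in the dfsadmin report
  # before the step fails.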

  def test_prepare_rolling_upgrade__upgrade(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/secured.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    json_content['commandParams']['upgrade_direction'] = 'upgrade'
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "prepare_rolling_upgrade",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, "Safe mode is OFF in c6401.ambari.apache.org")])
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
        logoutput = True, user = 'hdfs')
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade prepare',
        logoutput = True, user = 'hdfs')
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade query',
        logoutput = True, user = 'hdfs')
    self.assertNoMoreResources()

  @patch.object(shell, "call")
  def test_prepare_rolling_upgrade__downgrade(self, shell_call_mock):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/secured.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    json_content['commandParams']['upgrade_direction'] = 'downgrade'
    # Mock the safemode_check call
    shell_call_mock.return_value = 0, "Safe mode is OFF in c6401.ambari.apache.org"
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "prepare_rolling_upgrade",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute',
        '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
        logoutput = True, user = 'hdfs')
    self.assertNoMoreResources()

  def test_finalize_rolling_upgrade(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "finalize_rolling_upgrade",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade query',
        logoutput = True,
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade finalize',
        logoutput = True,
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade query',
        logoutput = True,
        user = 'hdfs',
    )
    self.assertNoMoreResources()
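
  # The query / finalize / query bracket above captures the rolling-upgrade
  # state both before and after finalization, so a failed finalize is visible
  # in the surrounding query output.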

  @patch.object(shell, "call")
  def test_pre_upgrade_restart_21_and_lower_params(self, call_mock):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    json_content['hostLevelParams']['stack_name'] = 'HDP'
    json_content['hostLevelParams']['stack_version'] = '2.0'
    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "pre_upgrade_restart",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
        mocks_dict = mocks_dict)
    import sys
    self.assertEquals("/etc/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
    self.assertEquals("/usr/lib/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
    self.assertEquals("/usr/bin", sys.modules["params"].hadoop_bin_dir)
    self.assertEquals("/usr/lib/hadoop/sbin", sys.modules["params"].hadoop_bin)

  @patch.object(shell, "call")
  def test_pre_upgrade_restart_22_params(self, call_mock):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    # no explicit version in commandParams: the stack_version alone should
    # select the /usr/hdp/current symlink layout
    del json_content['commandParams']['version']
    json_content['hostLevelParams']['stack_name'] = 'HDP'
    json_content['hostLevelParams']['stack_version'] = '2.2'
    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "pre_upgrade_restart",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
        mocks_dict = mocks_dict)
    import sys
    self.assertEquals("/usr/hdp/current/hadoop-client/conf", sys.modules["params"].hadoop_conf_dir)
    self.assertEquals("/usr/hdp/current/hadoop-client/libexec", sys.modules["params"].hadoop_libexec_dir)
    self.assertEquals("/usr/hdp/current/hadoop-client/bin", sys.modules["params"].hadoop_bin_dir)
    self.assertEquals("/usr/hdp/current/hadoop-client/sbin", sys.modules["params"].hadoop_bin)

  @patch.object(shell, "call")
  def test_pre_upgrade_restart_23_params(self, call_mock):
    import itertools
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.3.0.0-1234'
    json_content['commandParams']['version'] = version
    json_content['commandParams']['upgrade_direction'] = 'upgrade'
    json_content['hostLevelParams']['stack_name'] = 'HDP'
    json_content['hostLevelParams']['stack_version'] = '2.3'
    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "pre_upgrade_restart",
        config_dict = json_content,
        hdp_stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = itertools.cycle([(0, None)]),
        mocks_dict = mocks_dict)
    import sys
    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/bin", sys.modules["params"].hadoop_bin_dir)
    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/sbin", sys.modules["params"].hadoop_bin)
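
  # The three *_params tests above pin the Hadoop directory layout per stack era:
  #   HDP 2.0/2.1: /etc/hadoop/conf and /usr/lib/hadoop/* (legacy layout)
  #   HDP 2.2:     /usr/hdp/current/hadoop-client/* ("current" symlinks)
  #   HDP 2.3:     /usr/hdp/<version>/hadoop/* (fully versioned paths)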


class Popen_Mock:
  # Stand-in for subprocess.Popen in balancer tests: the returned process mock
  # replays canned balancer progress output and reports a fixed return code.
  return_value = 1
  lines = ['Time Stamp Iteration# Bytes Already Moved Bytes Left To Move Bytes Being Moved\n',
           'Jul 28, 2014 5:01:49 PM 0 0 B 5.74 GB 9.79 GB\n',
           'Jul 28, 2014 5:03:00 PM 1 0 B 5.58 GB 9.79 GB\n',
           '']

  def __call__(self, *args, **kwargs):
    popen = MagicMock()
    popen.returncode = Popen_Mock.return_value
    # each readline() returns the next canned line; the trailing '' signals EOF
    popen.stdout.readline = MagicMock(side_effect = Popen_Mock.lines)
    return popen
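
# Usage sketch (an assumption for illustration; the actual patch target must
# match wherever namenode.py resolves Popen):
#
#   @patch("subprocess.Popen", new_callable=Popen_Mock)
#   def test_rebalance_output(self, popen_mock):
#     ...  # drive the balancer and assert on the parsed progress lines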