test_namenode.py

#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import json
import os
import tempfile
import time

from ambari_commons import OSCheck
from stacks.utils.RMFTestCase import *
from mock.mock import MagicMock, patch, call
from resource_management.libraries.script.script import Script
from resource_management.core import shell
from resource_management.core.exceptions import Fail


class TestNamenode(RMFTestCase):
  COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
  STACK_VERSION = "2.0.6"
  DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
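
  # "configure" with the default (non-secured) config should render the
  # standard NameNode setup and nothing else.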
  def test_configure_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                       classname = "NameNode",
                       command = "configure",
                       config_file = "default.json",
                       stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertNoMoreResources()
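
  # Start against a non-HDFS default filesystem (wasb://) with HDFS deployed
  # alongside it: the namenode is still formatted and started, but the
  # HdfsResource calls carry the wasb:// default_fs.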
  def test_start_default_alt_fs(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                       classname = "NameNode",
                       command = "start",
                       config_file = "altfs_plus_hdfs.json",
                       stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES,
                       call_mocks = [(0, "")],
    )
    self.assert_configure_default()
    self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$')
    self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
        path = ['/usr/bin'],
        user = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
        create_parents = True,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6405.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        tries = 115,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = True,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'wasb://abc@c6401.ambari.apache.org',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        owner = 'hdfs',
        dfs_type = '',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = True,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'wasb://abc@c6401.ambari.apache.org',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = True,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'wasb://abc@c6401.ambari.apache.org',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()
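
  # "install" with try_install should fall through to the same configure
  # resources as the default case.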
  def test_install_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                       classname = "NameNode",
                       command = "install",
                       config_file = "default_no_install.json",
                       stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES,
                       try_install = True
    )
    self.assert_configure_default()
    self.assertNoMoreResources()
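
  # Plain start with the default config: format on an empty name dir, start
  # the daemon, wait for safe mode to turn OFF, then create /tmp and
  # /user/ambari-qa.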
  def test_start_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                       classname = "NameNode",
                       command = "start",
                       config_file = "default.json",
                       stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES,
                       call_mocks = [(0, "")],
    )
    self.assert_configure_default()
    self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$')
    self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
        path = ['/usr/bin'],
        user = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
        create_parents = True,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        create_parents = True,
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        create_parents = True,
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        tries = 115,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = True,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        owner = 'hdfs',
        dfs_type = '',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = True,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        owner = 'ambari-qa',
        dfs_type = '',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = True,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()
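
  # Stop should only run the daemon stop command (guarded by the pid file)
  # and then delete the pid file.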
  def test_stop_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                       classname = "NameNode",
                       command = "stop",
                       config_file = "default.json",
                       stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
    )
    self.assertNoMoreResources()

  def test_configure_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                       classname = "NameNode",
                       command = "configure",
                       config_file = "secured.json",
                       stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertNoMoreResources()
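
  # Secured start adds a kinit with the hdfs headless keytab before the
  # safe-mode check, and the HdfsResource calls carry keytab/principal.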
  def test_start_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                       classname = "NameNode",
                       command = "start",
                       config_file = "secured.json",
                       stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES,
                       call_mocks = [(0, "")],
    )
    self.assert_configure_secured()
    self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$')
    self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
        path = ['/usr/bin'],
        user = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
        create_parents = True,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        tries = 115,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = True,
        hadoop_bin_dir = '/usr/bin',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        owner = 'hdfs',
        dfs_type = '',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        principal_name = 'hdfs',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        mode = 0777,
        only_if = True,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = True,
        hadoop_bin_dir = '/usr/bin',
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        principal_name = 'hdfs',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        mode = 0770,
        only_if = True,
    )
    self.assertResourceCalled('HdfsResource', None,
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = True,
        only_if = True,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        hadoop_bin_dir = '/usr/bin',
        kinit_path_local = '/usr/bin/kinit',
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        principal_name = 'hdfs',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()

  def test_stop_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                       classname = "NameNode",
                       command = "stop",
                       config_file = "secured.json",
                       stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
    )
    self.assertNoMoreResources()
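
  # HA start: no format step; the HdfsResource calls are guarded by an
  # only_if that checks whether nn1 is the active namenode.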
  def test_start_ha_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                       classname = "NameNode",
                       command = "start",
                       config_file = "ha_default.json",
                       stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        tries = 115,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'hdfs',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()
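
  # Retries the active-namenode check: shell.call is mocked to fail four
  # times (alternating nn1/nn2) before nn1 finally reports active on the
  # fifth call.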
  @patch.object(shell, "call")
  @patch.object(time, "sleep")
  def test_start_ha_default_active_with_retry(self, sleep_mock, call_mocks):
    call_mocks = MagicMock()
    call_mocks.side_effect = [(1, None), (1, None), (1, None), (1, None), (0, None)]
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                       classname = "NameNode",
                       command = "start",
                       config_file = "ha_default.json",
                       stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES,
                       call_mocks = call_mocks
    )
    self.assert_configure_default()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        tries = 115,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'hdfs',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()
    self.assertTrue(call_mocks.called)
    self.assertEqual(5, call_mocks.call_count)
    calls = [
      call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'"),
      call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'"),
      call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'"),
      call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'"),
      call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'")]
    call_mocks.assert_has_calls(calls)
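
  # Secured HA start: kinit plus the nn1 active check guarding each
  # HdfsResource call.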
  def test_start_ha_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                       classname = "NameNode",
                       command = "start",
                       config_file = "ha_secured.json",
                       stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        tries = 115,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = True,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = 'hdfs',
        user = 'hdfs',
        dfs_type = '',
        owner = 'hdfs',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = True,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = 'hdfs',
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = True,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = 'hdfs',
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()

  # tests namenode start command when NameNode HA is enabled, and
  # the HA cluster is started initially, rather than using the UI Wizard
  def test_start_ha_bootstrap_active_from_blueprint(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                       classname = "NameNode",
                       command = "start",
                       config_file = "ha_bootstrap_active_node.json",
                       stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    # verify that the active namenode was formatted
    self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$')
    self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
        path = ['/usr/bin'],
        user = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
        create_parents = True,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        tries = 115,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'hdfs',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()

  # tests namenode start command when NameNode HA is enabled, and
  # the HA cluster is started initially, rather than using the UI Wizard
  # this test verifies the startup of a "standby" namenode
  @patch.object(shell, "call")
  def test_start_ha_bootstrap_standby_from_blueprint(self, call_mocks):
    call_mocks = MagicMock(return_value=(0, ""))
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                       classname = "NameNode",
                       command = "start",
                       config_file = "ha_bootstrap_standby_node.json",
                       stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES,
                       call_mocks = call_mocks
    )
    self.assert_configure_default()
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0755,
    )
    # TODO: Using shell.call() to bootstrap standby which is patched to return status code '5' (i.e. already bootstrapped)
    # Need to update the test case to verify that the standby case is detected, and that the bootstrap
    # command is run before the namenode launches
    self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
        action = ['delete'],
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6402.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        tries = 115,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True,
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'hdfs',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()
    self.assertTrue(call_mocks.called)
    self.assertEqual(2, call_mocks.call_count)
    calls = [
      call('hdfs namenode -bootstrapStandby -nonInteractive', logoutput=False, user=u'hdfs'),
      call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'")]
    call_mocks.assert_has_calls(calls, any_order=False)
  913. # tests namenode start command when NameNode HA is enabled, and
  914. # the HA cluster is started initially, rather than using the UI Wizard
  915. # this test verifies the startup of a "standby" namenode
  916. @patch.object(shell, "call")
  917. def test_start_ha_bootstrap_standby_from_blueprint_initial_start(self, call_mocks):
  918. call_mocks = MagicMock()
  919. call_mocks.side_effect = [(1, None), (0, None), (0, None)]
  920. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
  921. classname = "NameNode",
  922. command = "start",
  923. config_file="ha_bootstrap_standby_node_initial_start.json",
  924. stack_version = self.STACK_VERSION,
  925. target = RMFTestCase.TARGET_COMMON_SERVICES,
  926. call_mocks = call_mocks
  927. )
  928. self.assert_configure_default()
  929. self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
  930. owner = 'hdfs',
  931. content = Template('exclude_hosts_list.j2'),
  932. group = 'hadoop',
  933. )
  934. self.assertResourceCalled('Directory', '/var/run/hadoop',
  935. owner = 'hdfs',
  936. group = 'hadoop',
  937. mode = 0755
  938. )
  939. # TODO: Using shell.call() to bootstrap standby which is patched to return status code '5' (i.e. already bootstrapped)
  940. # Need to update the test case to verify that the standby case is detected, and that the bootstrap
  941. # command is run before the namenode launches
  942. self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
  943. owner = 'hdfs',
  944. group = 'hadoop',
  945. create_parents = True,
  946. )
  947. self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
  948. owner = 'hdfs',
  949. group = 'hadoop',
  950. create_parents = True,
  951. )
  952. self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
  953. action = ['delete'],
  954. not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
  955. )
    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
    )
    self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6402.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
        tries = 115,
        try_sleep = 10,
        user = "hdfs",
        logoutput = True
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'hdfs',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0777,
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        owner = 'ambari-qa',
        hadoop_conf_dir = '/etc/hadoop/conf',
        type = 'directory',
        action = ['create_on_execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        mode = 0770,
    )
    self.assertResourceCalled('HdfsResource', None,
        immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
        security_enabled = False,
        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://ns1',
        hdfs_site = self.getConfig()['configurations']['hdfs-site'],
        kinit_path_local = '/usr/bin/kinit',
        principal_name = None,
        user = 'hdfs',
        dfs_type = '',
        action = ['execute'],
        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
        hadoop_conf_dir = '/etc/hadoop/conf',
    )
    self.assertNoMoreResources()
    self.assertTrue(call_mocks.called)
    self.assertEqual(3, call_mocks.call_count)
    calls = [
        call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'"),
        call('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput = False, user = u'hdfs'),
        call('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput = False, user = u'hdfs')]
    call_mocks.assert_has_calls(calls, any_order = True)
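
  # The three expected shell calls above line up with call_count == 3: one
  # haadmin -getServiceState probe plus two bootstrapStandby attempts. That
  # reading is inferred from the assertions themselves, not from namenode.py.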

  def test_decommission_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "decommission",
        config_file = "default.json",
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Execute', '', user = 'hdfs')
    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
        user = 'hdfs',
        conf_dir = '/etc/hadoop/conf',
        bin_dir = '/usr/bin')
    self.assertNoMoreResources()

  def test_decommission_update_exclude_file_only(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "decommission",
        config_file = "default_update_exclude_file_only.json",
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertNoMoreResources()

  def test_decommission_ha_default(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "decommission",
        config_file = "ha_default.json",
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Execute', '', user = 'hdfs')
    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
        user = 'hdfs',
        conf_dir = '/etc/hadoop/conf',
        bin_dir = '/usr/bin')
    self.assertNoMoreResources()

  def test_decommission_secured(self):
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "decommission",
        config_file = "secured.json",
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
        owner = 'hdfs',
        content = Template('exclude_hosts_list.j2'),
        group = 'hadoop',
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM;',
        user = 'hdfs',
    )
    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
        bin_dir = '/usr/bin',
        conf_dir = '/etc/hadoop/conf',
        user = 'hdfs',
    )
    self.assertNoMoreResources()
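
  # The two assert_configure_* helpers below capture the resource sequence
  # shared by the start/stop tests. The default and secured variants differ
  # only in the expected owner of /etc/hadoop/conf/slaves ('hdfs' vs 'root').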

  def assert_configure_default(self):
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
        create_parents = True,
    )
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
        create_parents = True,
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
        to = '/usr/lib/hadoop/lib/libsnappy.so',
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
        to = '/usr/lib/hadoop/lib64/libsnappy.so',
    )
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
        owner = 'root',
        group = 'root',
        create_parents = True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
        content = Template('hdfs.conf.j2'),
        owner = 'root',
        group = 'root',
        mode = 0644,
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['core-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
        mode = 0644
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
        content = Template('slaves.j2'),
        owner = 'hdfs',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
        mode = 0755,
        cd_access = 'a'
    )

  def assert_configure_secured(self):
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
        create_parents = True,
    )
    self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
        create_parents = True,
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
        to = '/usr/lib/hadoop/lib/libsnappy.so',
    )
    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
        to = '/usr/lib/hadoop/lib64/libsnappy.so',
    )
    self.assertResourceCalled('Directory', '/etc/security/limits.d',
        owner = 'root',
        group = 'root',
        create_parents = True,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
        content = Template('hdfs.conf.j2'),
        owner = 'root',
        group = 'root',
        mode = 0644,
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['core-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
        mode = 0644
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
        content = Template('slaves.j2'),
        owner = 'root',
    )
    self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
        owner = 'hdfs',
        group = 'hadoop',
        create_parents = True,
        mode = 0755,
        cd_access = 'a'
    )
  1199. @patch("resource_management.libraries.script.Script.put_structured_out")
  1200. def test_rebalance_hdfs(self, pso):
  1201. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
  1202. classname = "NameNode",
  1203. command = "rebalancehdfs",
  1204. config_file = "rebalancehdfs_default.json",
  1205. stack_version = self.STACK_VERSION,
  1206. target = RMFTestCase.TARGET_COMMON_SERVICES
  1207. )
  1208. self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf balancer -threshold -1'",
  1209. logoutput = False,
  1210. on_new_line = FunctionMock('handle_new_line'),
  1211. )
  1212. self.assertNoMoreResources()
  1213. @patch("resource_management.libraries.script.Script.put_structured_out")
  1214. @patch("os.system")
  1215. def test_rebalance_secured_hdfs(self, pso, system_mock):
  1216. system_mock.return_value = -1
  1217. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
  1218. classname = "NameNode",
  1219. command = "rebalancehdfs",
  1220. config_file = "rebalancehdfs_secured.json",
  1221. stack_version = self.STACK_VERSION,
  1222. target = RMFTestCase.TARGET_COMMON_SERVICES,
  1223. call_mocks=[(1, "no kinit")]
  1224. )
  1225. tempdir = tempfile.gettempdir()
  1226. ccache_path = os.path.join(tempfile.gettempdir(), "hdfs_rebalance_cc_7add60ca651f1bd1ed909a6668937ba9")
  1227. kinit_cmd = "/usr/bin/kinit -c {0} -kt /etc/security/keytabs/hdfs.headless.keytab hdfs@EXAMPLE.COM".format(ccache_path)
  1228. rebalance_cmd = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin KRB5CCNAME={0} ; hdfs --config /etc/hadoop/conf balancer -threshold -1'".format(ccache_path)
  1229. self.assertResourceCalled('Execute', kinit_cmd,
  1230. user = 'hdfs',
  1231. )
  1232. self.assertResourceCalled('Execute', rebalance_cmd,
  1233. logoutput = False,
  1234. on_new_line = FunctionMock('handle_new_line'),
  1235. )
  1236. self.assertResourceCalled('File', ccache_path,
  1237. action = ['delete'],
  1238. )
  1239. self.assertNoMoreResources()
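
  # Secured rebalance flow asserted above: kinit into a private credential
  # cache, run the balancer with KRB5CCNAME pointing at that cache, then
  # delete the cache file. The hash-like suffix in the cache name is copied
  # verbatim from the expected command, not derived here.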
  1240. @patch("os.path.isfile")
  1241. def test_ranger_installed_missing_file(self, isfile_mock):
  1242. """
  1243. Tests that when Ranger is enabled for HDFS, that an exception is thrown
  1244. if there is no install.properties found
  1245. :return:
  1246. """
  1247. isfile_mock.return_value = False
  1248. try:
  1249. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
  1250. classname = "NameNode", command = "start", config_file = "ranger-namenode-start.json",
  1251. stack_version = self.STACK_VERSION, target = RMFTestCase.TARGET_COMMON_SERVICES )
  1252. self.fail("Expected a failure since the ranger install.properties was missing")
  1253. except Fail, failure:
  1254. pass
  1255. self.assertTrue(isfile_mock.called)
  1256. @patch("resource_management.libraries.functions.security_commons.build_expectations")
  1257. @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
  1258. @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
  1259. @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
  1260. @patch("resource_management.libraries.script.Script.put_structured_out")
  1261. def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
  1262. # Test that function works when is called with correct parameters
  1263. security_params = {
  1264. 'core-site': {
  1265. 'hadoop.security.authentication': 'kerberos'
  1266. },
  1267. 'hdfs-site': {
  1268. 'dfs.namenode.keytab.file': 'path/to/namenode/keytab/file',
  1269. 'dfs.namenode.kerberos.principal': 'namenode_principal'
  1270. }
  1271. }
  1272. props_value_check = None
  1273. props_empty_check = ['dfs.namenode.kerberos.internal.spnego.principal',
  1274. 'dfs.namenode.keytab.file',
  1275. 'dfs.namenode.kerberos.principal']
  1276. props_read_check = ['dfs.namenode.keytab.file']
  1277. result_issues = []
  1278. get_params_mock.return_value = security_params
  1279. validate_security_config_mock.return_value = result_issues
  1280. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
  1281. classname = "NameNode",
  1282. command = "security_status",
  1283. config_file="secured.json",
  1284. stack_version = self.STACK_VERSION,
  1285. target = RMFTestCase.TARGET_COMMON_SERVICES
  1286. )
  1287. build_exp_mock.assert_called_with('hdfs-site', props_value_check, props_empty_check, props_read_check)
  1288. put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
  1289. cached_kinit_executor_mock.called_with('/usr/bin/kinit',
  1290. self.config_dict['configurations']['hadoop-env']['hdfs_user'],
  1291. security_params['hdfs-site']['dfs.namenode.keytab.file'],
  1292. security_params['hdfs-site']['dfs.namenode.kerberos.principal'],
  1293. self.config_dict['hostname'],
  1294. '/tmp')
  1295. # Testing when hadoop.security.authentication is simple
  1296. security_params['core-site']['hadoop.security.authentication'] = 'simple'
  1297. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
  1298. classname = "NameNode",
  1299. command = "security_status",
  1300. config_file="secured.json",
  1301. stack_version = self.STACK_VERSION,
  1302. target = RMFTestCase.TARGET_COMMON_SERVICES
  1303. )
  1304. put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
  1305. security_params['core-site']['hadoop.security.authentication'] = 'kerberos'
  1306. # Testing that the exception throw by cached_executor is caught
  1307. cached_kinit_executor_mock.reset_mock()
  1308. cached_kinit_executor_mock.side_effect = Exception("Invalid command")
  1309. try:
  1310. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
  1311. classname = "NameNode",
  1312. command = "security_status",
  1313. config_file="secured.json",
  1314. stack_version = self.STACK_VERSION,
  1315. target = RMFTestCase.TARGET_COMMON_SERVICES
  1316. )
  1317. except:
  1318. self.assertTrue(True)
  1319. # Testing with a security_params which doesn't contains hdfs-site
  1320. empty_security_params = {
  1321. 'core-site': {
  1322. 'hadoop.security.authentication': 'kerberos'
  1323. }
  1324. }
  1325. cached_kinit_executor_mock.reset_mock()
  1326. get_params_mock.reset_mock()
  1327. put_structured_out_mock.reset_mock()
  1328. get_params_mock.return_value = empty_security_params
  1329. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
  1330. classname = "NameNode",
  1331. command = "security_status",
  1332. config_file="secured.json",
  1333. stack_version = self.STACK_VERSION,
  1334. target = RMFTestCase.TARGET_COMMON_SERVICES
  1335. )
  1336. put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})
  1337. # Testing with not empty result_issues
  1338. result_issues_with_params = {
  1339. 'hdfs-site': "Something bad happened"
  1340. }
  1341. validate_security_config_mock.reset_mock()
  1342. get_params_mock.reset_mock()
  1343. validate_security_config_mock.return_value = result_issues_with_params
  1344. get_params_mock.return_value = security_params
  1345. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
  1346. classname = "NameNode",
  1347. command = "security_status",
  1348. config_file="secured.json",
  1349. stack_version = self.STACK_VERSION,
  1350. target = RMFTestCase.TARGET_COMMON_SERVICES
  1351. )
  1352. put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
  1353. @patch("utils.get_namenode_states")
  1354. def test_upgrade_restart(self, get_namenode_states_mock):
  1355. # Execution of nn_ru_lzo invokes a code path that invokes lzo installation, which
  1356. # was failing in RU case. See hdfs.py and the lzo_enabled check that is in it.
  1357. # Just executing the script is enough to test the fix
  1358. active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
  1359. standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
  1360. unknown_namenodes = []
  1361. get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
  1362. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
  1363. classname = "NameNode",
  1364. command = "restart",
  1365. config_file = "nn_ru_lzo.json",
  1366. stack_version = self.STACK_VERSION,
  1367. target = RMFTestCase.TARGET_COMMON_SERVICES)
  1368. unknown_namenodes = active_namenodes
  1369. active_namenodes = []
  1370. get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
  1371. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
  1372. classname = "NameNode",
  1373. command = "restart",
  1374. config_file = "nn_ru_lzo.json",
  1375. stack_version = self.STACK_VERSION,
  1376. target = RMFTestCase.TARGET_COMMON_SERVICES)
  1377. self.assertFalse(0 == len(Script.structuredOut))
  1378. self.assertTrue(Script.structuredOut.has_key("upgrade_type"))
  1379. self.assertTrue(Script.structuredOut.has_key("direction"))
  1380. self.assertEquals("rolling_upgrade", Script.structuredOut["upgrade_type"])
  1381. self.assertEquals("UPGRADE", Script.structuredOut["direction"])
  1382. @patch("utils.get_namenode_states")
  1383. def test_upgrade_restart_eu(self, get_namenode_states_mock):
  1384. active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
  1385. standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
  1386. unknown_namenodes = []
  1387. mocks_dict = {}
  1388. get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
  1389. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
  1390. classname = "NameNode",
  1391. command = "restart",
  1392. config_file = "nn_eu_standby.json",
  1393. stack_version = self.STACK_VERSION,
  1394. target = RMFTestCase.TARGET_COMMON_SERVICES,
  1395. call_mocks = [(0, None, ''), (0, None)],
  1396. mocks_dict=mocks_dict)
  1397. calls = mocks_dict['call'].call_args_list
  1398. self.assertTrue(len(calls) >= 1)
  1399. self.assertTrue(calls[0].startsWith("conf-select create-conf-dir --package hadoop --stack-version 2.3.2.0-2844 --conf-version 0"))
  1400. @patch("hdfs_namenode.is_active_namenode")
  1401. @patch("resource_management.libraries.functions.setup_ranger_plugin_xml.setup_ranger_plugin")
  1402. @patch("utils.get_namenode_states")
  1403. def test_upgrade_restart_eu_with_ranger(self, get_namenode_states_mock, setup_ranger_plugin_mock, is_active_nn_mock):
  1404. is_active_nn_mock.return_value = True
  1405. config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_eu.json"
  1406. with open(config_file, "r") as f:
  1407. json_content = json.load(f)
  1408. version = '2.3.4.0-1111'
  1409. json_content['commandParams']['version'] = version
  1410. active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
  1411. standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
  1412. unknown_namenodes = []
  1413. mocks_dict = {}
  1414. get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
  1415. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
  1416. classname = "NameNode",
  1417. command = "start",
  1418. command_args=["nonrolling"],
  1419. config_dict = json_content,
  1420. stack_version = self.STACK_VERSION,
  1421. target = RMFTestCase.TARGET_COMMON_SERVICES,
  1422. call_mocks = [(0, None, ''), (0, None)],
  1423. mocks_dict=mocks_dict)
  1424. self.assertTrue(setup_ranger_plugin_mock.called)
  1425. self.assertResourceCalledByIndex(7, 'Execute',
  1426. ('mv', '/usr/hdp/2.3.4.0-1111/hadoop/conf/set-hdfs-plugin-env.sh', '/usr/hdp/2.3.4.0-1111/hadoop/conf/set-hdfs-plugin-env.sh.bak'),
  1427. only_if='test -f /usr/hdp/2.3.4.0-1111/hadoop/conf/set-hdfs-plugin-env.sh',
  1428. sudo=True)
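
  # assertResourceCalledByIndex pins the set-hdfs-plugin-env.sh backup to a
  # specific position in the resource sequence (index 7, i.e. the eighth
  # resource assuming 0-based indexing); the surrounding resources are not
  # asserted by this test.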

  def test_pre_upgrade_restart(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.2.1.0-3242'
    json_content['commandParams']['version'] = version
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "pre_upgrade_restart",
        config_dict = json_content,
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute',
        ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-namenode', version), sudo = True)
  1443. @patch("resource_management.core.shell.call")
  1444. def test_pre_upgrade_restart_23(self, call_mock):
  1445. config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
  1446. with open(config_file, "r") as f:
  1447. json_content = json.load(f)
  1448. version = '2.3.0.0-1234'
  1449. json_content['commandParams']['version'] = version
  1450. mocks_dict = {}
  1451. self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
  1452. classname = "NameNode",
  1453. command = "pre_upgrade_restart",
  1454. config_dict = json_content,
  1455. stack_version = self.STACK_VERSION,
  1456. target = RMFTestCase.TARGET_COMMON_SERVICES,
  1457. call_mocks = [(0, None), (0, None)],
  1458. mocks_dict = mocks_dict)
  1459. self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-namenode', version), sudo=True)
  1460. self.assertNoMoreResources()

  def test_post_upgrade_restart(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "post_upgrade_restart",
        config_dict = json_content,
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -report -live',
        user = 'hdfs',
        tries = 60,
        try_sleep = 10
    )
    self.assertNoMoreResources()

  def test_post_upgrade_ha_restart(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/ha_default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "post_upgrade_restart",
        config_dict = json_content,
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -report -live',
        user = 'hdfs',
        tries = 60,
        try_sleep = 10
    )
    self.assertNoMoreResources()

  def test_prepare_rolling_upgrade__upgrade(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/secured.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    json_content['commandParams']['upgrade_direction'] = 'upgrade'
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "prepare_rolling_upgrade",
        config_dict = json_content,
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, "Safe mode is OFF in c6401.ambari.apache.org")])
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
        logoutput = True, user = 'hdfs')
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade prepare',
        logoutput = True, user = 'hdfs')
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade query',
        logoutput = True, user = 'hdfs')
    self.assertNoMoreResources()

  def test_prepare_rolling_upgrade__ha_upgrade(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/ha_secured.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    json_content['commandParams']['upgrade_direction'] = 'upgrade'
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "prepare_rolling_upgrade",
        config_dict = json_content,
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, "Safe mode is OFF in c6401.ambari.apache.org")])
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
        logoutput = True, user = 'hdfs')
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade prepare',
        logoutput = True, user = 'hdfs')
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade query',
        logoutput = True, user = 'hdfs')
    self.assertNoMoreResources()

  @patch.object(shell, "call")
  def test_prepare_rolling_upgrade__downgrade(self, shell_call_mock):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/secured.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    json_content['commandParams']['upgrade_direction'] = 'downgrade'
    # Mock the safemode_check call
    shell_call_mock.return_value = 0, "Safe mode is OFF in c6401.ambari.apache.org"
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "prepare_rolling_upgrade",
        config_dict = json_content,
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute',
        '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
        logoutput = True, user = 'hdfs')
    self.assertNoMoreResources()
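
  # For the 'downgrade' direction only the kinit is expected; the
  # -rollingUpgrade prepare/query pair asserted in the upgrade variants above
  # must not run.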

  def test_finalize_rolling_upgrade(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "finalize_rolling_upgrade",
        config_dict = json_content,
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade query',
        logoutput = True,
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade finalize',
        logoutput = True,
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade query',
        logoutput = True,
        user = 'hdfs',
    )
    self.assertNoMoreResources()

  def test_finalize_ha_rolling_upgrade(self):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/ha_default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "finalize_rolling_upgrade",
        config_dict = json_content,
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES)
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade query',
        logoutput = True,
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade finalize',
        logoutput = True,
        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade query',
        logoutput = True,
        user = 'hdfs',
    )
    self.assertNoMoreResources()
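
  # Finalization is asserted as query -> finalize -> query, issued against
  # the single NameNode address in the non-HA case and against hdfs://ns1
  # under HA.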

  @patch.object(shell, "call")
  def test_pre_upgrade_restart_21_and_lower_params(self, call_mock):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    json_content['hostLevelParams']['stack_name'] = 'HDP'
    json_content['hostLevelParams']['stack_version'] = '2.0'
    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "pre_upgrade_restart",
        config_dict = json_content,
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
        mocks_dict = mocks_dict)
    import sys
    self.assertEquals("/etc/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
    self.assertEquals("/usr/lib/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
    self.assertEquals("/usr/bin", sys.modules["params"].hadoop_bin_dir)
    self.assertEquals("/usr/lib/hadoop/sbin", sys.modules["params"].hadoop_bin)

  @patch.object(shell, "call")
  def test_pre_upgrade_restart_22_params(self, call_mock):
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.2.0.0-1234'
    del json_content['commandParams']['version']
    json_content['hostLevelParams']['stack_name'] = 'HDP'
    json_content['hostLevelParams']['stack_version'] = '2.2'
    json_content['commandParams']['version'] = version
    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "pre_upgrade_restart",
        config_dict = json_content,
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
        mocks_dict = mocks_dict)
    import sys
    self.assertEquals("/usr/hdp/current/hadoop-client/conf", sys.modules["params"].hadoop_conf_dir)
    self.assertEquals("/usr/hdp/{0}/hadoop/libexec".format(version), sys.modules["params"].hadoop_libexec_dir)
    self.assertEquals("/usr/hdp/{0}/hadoop/bin".format(version), sys.modules["params"].hadoop_bin_dir)
    self.assertEquals("/usr/hdp/{0}/hadoop/sbin".format(version), sys.modules["params"].hadoop_bin)

  @patch.object(shell, "call")
  def test_pre_upgrade_restart_23_params(self, call_mock):
    import itertools
    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    version = '2.3.0.0-1234'
    json_content['commandParams']['version'] = version
    json_content['commandParams']['upgrade_direction'] = 'upgrade'
    json_content['hostLevelParams']['stack_name'] = 'HDP'
    json_content['hostLevelParams']['stack_version'] = '2.3'
    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
        classname = "NameNode",
        command = "pre_upgrade_restart",
        config_dict = json_content,
        stack_version = self.STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = itertools.cycle([(0, None)]),
        mocks_dict = mocks_dict)
    import sys
    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/bin", sys.modules["params"].hadoop_bin_dir)
    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/sbin", sys.modules["params"].hadoop_bin)


class Popen_Mock:
  return_value = 1
  lines = ['Time Stamp Iteration# Bytes Already Moved Bytes Left To Move Bytes Being Moved\n',
           'Jul 28, 2014 5:01:49 PM 0 0 B 5.74 GB 9.79 GB\n',
           'Jul 28, 2014 5:03:00 PM 1 0 B 5.58 GB 9.79 GB\n',
           '']

  def __call__(self, *args, **kwargs):
    popen = MagicMock()
    popen.returncode = Popen_Mock.return_value
    popen.stdout.readline = MagicMock(side_effect = Popen_Mock.lines)
    return popen
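
# A minimal usage sketch for Popen_Mock, assuming it stands in for
# subprocess.Popen in rebalance tests elsewhere in this file (the patch
# target and command below are illustrative, not taken from the original):
#
#   with patch("subprocess.Popen", new = Popen_Mock()):
#     proc = subprocess.Popen(["hdfs", "balancer"])  # returns the MagicMock
#     print proc.stdout.readline()                   # yields the canned lines
#     assert proc.returncode == 1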