- #!/usr/bin/env python
- '''
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- '''
- from ambari_commons import OSCheck
- import json
- import os
- import tempfile
- import time
- from stacks.utils.RMFTestCase import *
- from mock.mock import MagicMock, patch, call
- from resource_management.libraries.script.script import Script
- from resource_management.core import shell
- from resource_management.core.exceptions import Fail
- class TestNamenode(RMFTestCase):
- COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
- STACK_VERSION = "2.0.6"
- DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
- def test_configure_default(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "configure",
- config_file = "default.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assert_configure_default()
- self.assertNoMoreResources()
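- # The three calls above form the pattern every test in this class follows:
- # executeScript() runs the real namenode.py against a canned JSON config with
- # every Resource class mocked out, assertResourceCalled() pops the next
- # recorded resource and compares it field by field, and assertNoMoreResources()
- # checks the queue is empty. A minimal sketch of that FIFO discipline
- # (illustrative only; RMFTestCase provides the real implementation):
- #
- #   def assertResourceCalled(self, klass, name, **kwargs):
- #       resource = self.resource_list.pop(0)   # order of execution matters
- #       self.assertEqual((klass, name), (resource.__class__.__name__, resource.name))
- #       self.assertEqual(kwargs, resource.arguments)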
- def test_start_default_alt_fs(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file = "altfs_plus_hdfs.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0,"")],
- )
- self.assert_configure_default()
- self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
- self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
- path = ['/usr/bin'],
- user = 'hdfs',
- )
- self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
- create_parents = True,
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6405.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
- tries=115,
- try_sleep=10,
- user="hdfs",
- logoutput=True
- )
- self.assertResourceCalled('HdfsResource', '/tmp',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if=True,
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'wasb://abc@c6401.ambari.apache.org',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- owner = 'hdfs',
- dfs_type = '',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0777,
- )
- self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if=True,
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'wasb://abc@c6401.ambari.apache.org',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- owner = 'ambari-qa',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0770,
- )
- self.assertResourceCalled('HdfsResource', None,
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if=True,
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'wasb://abc@c6401.ambari.apache.org',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hadoop_conf_dir = '/etc/hadoop/conf',
- )
- self.assertNoMoreResources()
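- # In this alt-FS scenario the cluster's default_fs is wasb://, yet the
- # safemode probe still addresses HDFS explicitly via
- # '-fs hdfs://c6405.ambari.apache.org:8020', which is exactly the
- # "alternative FS plus HDFS" coexistence this config file exercises.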
- def test_install_default(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "install",
- config_file = "default_no_install.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- try_install=True
- )
- self.assert_configure_default()
- self.assertNoMoreResources()
- def test_start_default(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file = "default.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0,"")],
- )
- self.assert_configure_default()
- self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
- self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
- path = ['/usr/bin'],
- user = 'hdfs',
- )
- self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
- create_parents = True,
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- create_parents = True,
- group = 'hadoop'
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- create_parents = True,
- group = 'hadoop'
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
- tries=115,
- try_sleep=10,
- user="hdfs",
- logoutput=True
- )
- self.assertResourceCalled('HdfsResource', '/tmp',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = True,
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- owner = 'hdfs',
- dfs_type = '',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0777,
- )
- self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = True,
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- owner = 'ambari-qa',
- dfs_type = '',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0770,
- )
- self.assertResourceCalled('HdfsResource', None,
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = True,
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hadoop_conf_dir = '/etc/hadoop/conf',
- )
- self.assertNoMoreResources()
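- # The "ls ... | wc -l | grep -q ^0$" probe asserted at the top of this test
- # is the empty-name-dir check that gates the one-time 'hdfs namenode -format';
- # the namenode-formatted/ marker directory created afterwards records that
- # formatting already happened so later starts skip it.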
- def test_stop_default(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "stop",
- config_file = "default.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid")
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',action = ['delete'])
- self.assertNoMoreResources()
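- # Start and stop are guarded symmetrically: start uses not_if (skip the
- # daemon launch if the PID file names a live process), stop uses only_if
- # (signal it only when it does). The guard asserted above is, schematically:
- #
- #   pid = '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid'
- #   guard = ('ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f %s && '
- #            'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F %s') % (pid, pid)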
- def test_configure_secured(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "configure",
- config_file = "secured.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assert_configure_secured()
- self.assertNoMoreResources()
- def test_start_secured(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file = "secured.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0,"")],
- )
- self.assert_configure_secured()
- self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
- self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
- path = ['/usr/bin'],
- user = 'hdfs',
- )
- self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
- create_parents = True,
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
- user='hdfs',
- )
- self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
- tries=115,
- try_sleep=10,
- user="hdfs",
- logoutput=True
- )
- self.assertResourceCalled('HdfsResource', '/tmp',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = True,
- hadoop_bin_dir = '/usr/bin',
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- kinit_path_local = '/usr/bin/kinit',
- user = 'hdfs',
- owner = 'hdfs',
- dfs_type = '',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- principal_name = 'hdfs',
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- mode = 0777,
- only_if = True
- )
- self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = True,
- hadoop_bin_dir = '/usr/bin',
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- kinit_path_local = '/usr/bin/kinit',
- user = 'hdfs',
- dfs_type = '',
- owner = 'ambari-qa',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- principal_name = 'hdfs',
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- mode = 0770,
- only_if = True
- )
- self.assertResourceCalled('HdfsResource', None,
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = True,
- only_if = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- hadoop_bin_dir = '/usr/bin',
- kinit_path_local = '/usr/bin/kinit',
- user = 'hdfs',
- dfs_type = '',
- action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- principal_name = 'hdfs',
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- hadoop_conf_dir = '/etc/hadoop/conf',
- )
- self.assertNoMoreResources()
- def test_stop_secured(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "stop",
- config_file = "secured.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid")
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',action = ['delete'])
- self.assertNoMoreResources()
- def test_start_ha_default(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file = "ha_default.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assert_configure_default()
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
- tries=115,
- try_sleep=10,
- user="hdfs",
- logoutput=True
- )
- self.assertResourceCalled('HdfsResource', '/tmp',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- owner = 'hdfs',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0777,
- )
- self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- owner = 'ambari-qa',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0770,
- )
- self.assertResourceCalled('HdfsResource', None,
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hadoop_conf_dir = '/etc/hadoop/conf',
- )
- self.assertNoMoreResources()
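- # In the next test the patch decorators apply bottom-up: the mock for
- # @patch.object(time, "sleep") is passed first (sleep_mock) and the mock for
- # @patch.object(shell, "call") second (call_mocks). The test then discards
- # the injected mock and builds its own MagicMock whose side_effect yields one
- # (status, output) tuple per shell.call invocation, e.g.:
- #
- #   call_mocks = MagicMock()
- #   call_mocks.side_effect = [(1, None)] * 4 + [(0, None)]   # fail 4x, then succeed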
- @patch.object(shell, "call")
- @patch.object(time, "sleep")
- def test_start_ha_default_active_with_retry(self, sleep_mock, call_mocks):
- call_mocks = MagicMock()
- call_mocks.side_effect = [(1, None), (1, None), (1, None), (1, None), (0, None)]
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file = "ha_default.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = call_mocks
- )
- self.assert_configure_default()
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
- tries=115,
- try_sleep=10,
- user="hdfs",
- logoutput=True
- )
- self.assertResourceCalled('HdfsResource', '/tmp',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- owner = 'hdfs',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0777,
- )
- self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- owner = 'ambari-qa',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0770,
- )
- self.assertResourceCalled('HdfsResource', None,
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hadoop_conf_dir = '/etc/hadoop/conf',
- )
- self.assertNoMoreResources()
- self.assertTrue(call_mocks.called)
- self.assertEqual(5, call_mocks.call_count)
- calls = [
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'"),
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'"),
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'"),
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'"),
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'")]
- call_mocks.assert_has_calls(calls)
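- # The five expected calls show the retry loop alternating between nn1 and nn2
- # until one reports 'active'. A sketch of that loop under assumed names (the
- # real logic lives in the namenode service scripts, not in this test):
- #
- #   def wait_for_active_namenode(check_state, namenodes=('nn1', 'nn2')):
- #       for attempt in range(5):
- #           nn = namenodes[attempt % len(namenodes)]
- #           status, _ = check_state(nn)     # shell.call under the hood
- #           if status == 0:
- #               return nn
- #           time.sleep(5)                   # patched out by sleep_mock above
- #       raise Fail("no active namenode found")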
- def test_start_ha_secured(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file = "ha_secured.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assert_configure_secured()
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
- user = 'hdfs',
- )
- self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
- tries=115,
- try_sleep=10,
- user="hdfs",
- logoutput=True
- )
- self.assertResourceCalled('HdfsResource', '/tmp',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = True,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = 'hdfs',
- user = 'hdfs',
- dfs_type = '',
- owner = 'hdfs',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0777,
- )
- self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = True,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = 'hdfs',
- user = 'hdfs',
- dfs_type = '',
- owner = 'ambari-qa',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0770,
- )
- self.assertResourceCalled('HdfsResource', None,
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = True,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = 'hdfs',
- user = 'hdfs',
- dfs_type = '',
- action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hadoop_conf_dir = '/etc/hadoop/conf',
- )
- self.assertNoMoreResources()
- # Tests the namenode start command when NameNode HA is enabled and the HA
- # cluster is being started for the first time from a Blueprint deployment,
- # rather than being enabled later through the UI wizard.
- def test_start_ha_bootstrap_active_from_blueprint(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file="ha_bootstrap_active_node.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assert_configure_default()
- # verify that the active namenode was formatted
- self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
- self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
- path = ['/usr/bin'],
- user = 'hdfs',
- )
- self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
- create_parents = True,
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
- tries=115,
- try_sleep=10,
- user="hdfs",
- logoutput=True
- )
- self.assertResourceCalled('HdfsResource', '/tmp',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- owner = 'hdfs',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0777,
- )
- self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- owner = 'ambari-qa',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0770,
- )
- self.assertResourceCalled('HdfsResource', None,
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hadoop_conf_dir = '/etc/hadoop/conf',
- )
- self.assertNoMoreResources()
- # Tests the namenode start command when NameNode HA is enabled and the HA
- # cluster is being started for the first time from a Blueprint deployment,
- # rather than being enabled later through the UI wizard. This variant covers
- # the startup of a "standby" namenode.
- @patch.object(shell, "call")
- def test_start_ha_bootstrap_standby_from_blueprint(self, call_mocks):
- call_mocks = MagicMock(return_value=(0,""))
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file="ha_bootstrap_standby_node.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = call_mocks
- )
- self.assert_configure_default()
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- # TODO: shell.call() is used to bootstrap the standby and is patched here to
- # return status code '5' (i.e. already bootstrapped). Update this test case to
- # verify that the standby case is detected and that the bootstrap command is
- # run before the namenode launches.
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6402.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
- tries=115,
- try_sleep=10,
- user="hdfs",
- logoutput=True
- )
- self.assertResourceCalled('HdfsResource', '/tmp',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- owner = 'hdfs',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0777,
- )
- self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- owner = 'ambari-qa',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0770,
- )
- self.assertResourceCalled('HdfsResource', None,
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hadoop_conf_dir = '/etc/hadoop/conf',
- )
- self.assertNoMoreResources()
- self.assertTrue(call_mocks.called)
- self.assertEqual(2, call_mocks.call_count)
- calls = [
- call('hdfs namenode -bootstrapStandby -nonInteractive', logoutput=False, user=u'hdfs'),
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'")]
- call_mocks.assert_has_calls(calls, any_order=False)
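- # assert_has_calls(calls, any_order=False) pins the sequence: the standby must
- # run 'hdfs namenode -bootstrapStandby -nonInteractive' before the HA service
- # state is polled. Contrast this with the initial-start variant below, which
- # passes any_order=True because the bootstrap is retried with -force.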
- # Tests the namenode start command when NameNode HA is enabled and the HA
- # cluster is being started for the first time from a Blueprint deployment,
- # rather than being enabled later through the UI wizard. This variant covers
- # the initial start of a "standby" namenode.
- @patch.object(shell, "call")
- def test_start_ha_bootstrap_standby_from_blueprint_initial_start(self, call_mocks):
- call_mocks = MagicMock()
- call_mocks.side_effect = [(1, None), (0, None), (0, None)]
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file="ha_bootstrap_standby_node_initial_start.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = call_mocks
- )
- self.assert_configure_default()
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- # TODO: shell.call() is used to bootstrap the standby and is patched here to
- # return status code '5' (i.e. already bootstrapped). Update this test case to
- # verify that the standby case is detected and that the bootstrap command is
- # run before the namenode launches.
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
- )
- self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6402.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
- tries=115,
- try_sleep=10,
- user="hdfs",
- logoutput=True
- )
- self.assertResourceCalled('HdfsResource', '/tmp',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- owner = 'hdfs',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0777,
- )
- self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- owner = 'ambari-qa',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0770,
- )
- self.assertResourceCalled('HdfsResource', None,
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
- keytab = UnknownConfigurationMock(),
- hadoop_bin_dir = '/usr/bin',
- default_fs = 'hdfs://ns1',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = None,
- user = 'hdfs',
- dfs_type = '',
- action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hadoop_conf_dir = '/etc/hadoop/conf',
- )
- self.assertNoMoreResources()
- self.assertTrue(call_mocks.called)
- self.assertEqual(3, call_mocks.call_count)
- calls = [
- call("ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'"),
- call('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput=False, user=u'hdfs'),
- call('hdfs namenode -bootstrapStandby -nonInteractive -force', logoutput=False, user=u'hdfs')]
- call_mocks.assert_has_calls(calls, any_order=True)
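- # Here side_effect = [(1, None), (0, None), (0, None)] makes the first
- # shell.call (the HA state probe) fail, so the script bootstraps the standby
- # with -force; any_order=True tolerates the retried bootstrap. How a
- # side_effect list is consumed, for reference:
- #
- #   probe = MagicMock(side_effect=[(1, None), (0, None)])
- #   assert probe('haadmin -getServiceState nn2')[0] == 1    # first call fails
- #   assert probe('hdfs namenode -bootstrapStandby')[0] == 0 # second succeeds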
- def test_decommission_default(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "decommission",
- config_file = "default.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Execute', '', user = 'hdfs')  # kinit step is an empty command when security is off
- self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
- user = 'hdfs',
- conf_dir = '/etc/hadoop/conf',
- bin_dir = '/usr/bin')
- self.assertNoMoreResources()
- def test_decommission_update_exclude_file_only(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "decommission",
- config_file = "default_update_exclude_file_only.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertNoMoreResources()
- def test_decommission_ha_default(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "decommission",
- config_file = "ha_default.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Execute', '', user = 'hdfs')  # kinit step is an empty command when security is off
- self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
- user = 'hdfs',
- conf_dir = '/etc/hadoop/conf',
- bin_dir = '/usr/bin')
- self.assertNoMoreResources()
- def test_decommission_secured(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "decommission",
- config_file = "secured.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM;',
- user = 'hdfs',
- )
- self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
- bin_dir = '/usr/bin',
- conf_dir = '/etc/hadoop/conf',
- user = 'hdfs',
- )
- self.assertNoMoreResources()
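- # Every decommission variant has the same two-step shape: rewrite
- # /etc/hadoop/conf/dfs.exclude from the exclude_hosts_list.j2 template, then
- # tell the NameNode to re-read it with 'dfsadmin ... -refreshNodes'. Only the
- # secured variant prepends a kinit with the NameNode service principal, and
- # the update_exclude_file_only variant stops after rewriting the file.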
- def assert_configure_default(self):
- self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
- create_parents = True,
- )
- self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
- create_parents = True,
- )
- self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
- to = '/usr/lib/hadoop/lib/libsnappy.so',
- )
- self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
- to = '/usr/lib/hadoop/lib64/libsnappy.so',
- )
- self.assertResourceCalled('Directory', '/etc/security/limits.d',
- owner = 'root',
- group = 'root',
- create_parents = True,
- )
- self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
- content = Template('hdfs.conf.j2'),
- owner = 'root',
- group = 'root',
- mode = 0644,
- )
- self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
- owner = 'hdfs',
- group = 'hadoop',
- conf_dir = '/etc/hadoop/conf',
- configurations = self.getConfig()['configurations']['hdfs-site'],
- configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
- )
- self.assertResourceCalled('XmlConfig', 'core-site.xml',
- owner = 'hdfs',
- group = 'hadoop',
- conf_dir = '/etc/hadoop/conf',
- configurations = self.getConfig()['configurations']['core-site'],
- configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
- mode = 0644
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
- content = Template('slaves.j2'),
- owner = 'hdfs',
- )
- self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- mode = 0755,
- cd_access='a'
- )
- def assert_configure_secured(self):
- self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
- create_parents = True,
- )
- self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-amd64-64',
- create_parents = True,
- )
- self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
- to = '/usr/lib/hadoop/lib/libsnappy.so',
- )
- self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
- to = '/usr/lib/hadoop/lib64/libsnappy.so',
- )
- self.assertResourceCalled('Directory', '/etc/security/limits.d',
- owner = 'root',
- group = 'root',
- create_parents = True,
- )
- self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
- content = Template('hdfs.conf.j2'),
- owner = 'root',
- group = 'root',
- mode = 0644,
- )
- self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
- owner = 'hdfs',
- group = 'hadoop',
- conf_dir = '/etc/hadoop/conf',
- configurations = self.getConfig()['configurations']['hdfs-site'],
- configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
- )
- self.assertResourceCalled('XmlConfig', 'core-site.xml',
- owner = 'hdfs',
- group = 'hadoop',
- conf_dir = '/etc/hadoop/conf',
- configurations = self.getConfig()['configurations']['core-site'],
- configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
- mode = 0644
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
- content = Template('slaves.j2'),
- owner = 'root',
- )
- self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
- owner = 'hdfs',
- group = 'hadoop',
- create_parents = True,
- mode = 0755,
- cd_access='a'
- )
- @patch("resource_management.libraries.script.Script.put_structured_out")
- def test_rebalance_hdfs(self, pso):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "rebalancehdfs",
- config_file = "rebalancehdfs_default.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf balancer -threshold -1'",
- logoutput = False,
- on_new_line = FunctionMock('handle_new_line'),
- )
- self.assertNoMoreResources()
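- # The on_new_line handler mocked above consumes balancer progress lines like
- # the ones Popen_Mock at the bottom of this file replays. A minimal parsing
- # sketch, assuming the stock balancer column format (the helper is hypothetical):
- import re
- def parse_balancer_line(line):
-     m = re.search(r'(\d+)\s+([\d.]+ \w?B)\s+([\d.]+ \w?B)\s+([\d.]+ \w?B)\s*$', line)
-     if m is None:
-         return None  # header row or trailing empty line
-     return {'iteration': int(m.group(1)), 'moved': m.group(2),
-             'left': m.group(3), 'being_moved': m.group(4)}
- assert parse_balancer_line('Jul 28, 2014 5:01:49 PM  0  0 B  5.74 GB  9.79 GB')['left'] == '5.74 GB'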
- @patch("resource_management.libraries.script.Script.put_structured_out")
- @patch("os.system")
- def test_rebalance_secured_hdfs(self, system_mock, pso):
- # @patch decorators apply bottom-up: os.system (nearest the def) is the first mock arg
- system_mock.return_value = -1
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "rebalancehdfs",
- config_file = "rebalancehdfs_secured.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks=[(1, "no kinit")]
- )
- tempdir = tempfile.gettempdir()
- ccache_path = os.path.join(tempdir, "hdfs_rebalance_cc_7add60ca651f1bd1ed909a6668937ba9")
- kinit_cmd = "/usr/bin/kinit -c {0} -kt /etc/security/keytabs/hdfs.headless.keytab hdfs@EXAMPLE.COM".format(ccache_path)
- rebalance_cmd = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin KRB5CCNAME={0} ; hdfs --config /etc/hadoop/conf balancer -threshold -1'".format(ccache_path)
- self.assertResourceCalled('Execute', kinit_cmd,
- user = 'hdfs',
- )
- self.assertResourceCalled('Execute', rebalance_cmd,
- logoutput = False,
- on_new_line = FunctionMock('handle_new_line'),
- )
- self.assertResourceCalled('File', ccache_path,
- action = ['delete'],
- )
- self.assertNoMoreResources()
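- # Argument order in the secured-rebalance test above follows mock's rule that
- # @patch decorators apply bottom-up: the decorator nearest the def supplies
- # the first mock argument. A minimal self-contained sketch (demo names are
- # hypothetical):
- from mock import patch
- @patch("os.path.isdir")  # outer patch -> second mock argument
- @patch("os.system")      # inner patch -> first mock argument
- def decorator_order_demo(system_mock, isdir_mock):
-     system_mock.return_value = -1
-     isdir_mock.return_value = True
-     return system_mock, isdir_mock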
- @patch("os.path.isfile")
- def test_ranger_installed_missing_file(self, isfile_mock):
- """
- Tests that when Ranger is enabled for HDFS, that an exception is thrown
- if there is no install.properties found
- :return:
- """
- isfile_mock.return_value = False
- try:
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode", command = "start", config_file = "ranger-namenode-start.json",
- stack_version = self.STACK_VERSION, target = RMFTestCase.TARGET_COMMON_SERVICES )
- self.fail("Expected a failure since the ranger install.properties was missing")
- except Fail:
- pass
- self.assertTrue(isfile_mock.called)
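- # The try/except/self.fail pattern above is what unittest's assertRaises
- # encapsulates; a minimal self-contained sketch of the equivalent form
- # (demo class and exception are hypothetical):
- import unittest
- class RaisesDemo(unittest.TestCase):
-     def test_failure_expected(self):
-         def boom():
-             raise ValueError("no install.properties found")
-         self.assertRaises(ValueError, boom)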
- @patch("resource_management.libraries.functions.security_commons.build_expectations")
- @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
- @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
- @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
- @patch("resource_management.libraries.script.Script.put_structured_out")
- def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
- # Test that the function works when it is called with correct parameters
- security_params = {
- 'core-site': {
- 'hadoop.security.authentication': 'kerberos'
- },
- 'hdfs-site': {
- 'dfs.namenode.keytab.file': 'path/to/namenode/keytab/file',
- 'dfs.namenode.kerberos.principal': 'namenode_principal'
- }
- }
- props_value_check = None
- props_empty_check = ['dfs.namenode.kerberos.internal.spnego.principal',
- 'dfs.namenode.keytab.file',
- 'dfs.namenode.kerberos.principal']
- props_read_check = ['dfs.namenode.keytab.file']
- result_issues = []
- get_params_mock.return_value = security_params
- validate_security_config_mock.return_value = result_issues
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "security_status",
- config_file="secured.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- build_exp_mock.assert_called_with('hdfs-site', props_value_check, props_empty_check, props_read_check)
- put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
- # Mock has no called_with(); assert_called_with is needed for the check to actually run
- cached_kinit_executor_mock.assert_called_with('/usr/bin/kinit',
- self.config_dict['configurations']['hadoop-env']['hdfs_user'],
- security_params['hdfs-site']['dfs.namenode.keytab.file'],
- security_params['hdfs-site']['dfs.namenode.kerberos.principal'],
- self.config_dict['hostname'],
- '/tmp')
- # Testing when hadoop.security.authentication is simple
- security_params['core-site']['hadoop.security.authentication'] = 'simple'
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "security_status",
- config_file="secured.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
- security_params['core-site']['hadoop.security.authentication'] = 'kerberos'
- # Testing that the exception thrown by cached_kinit_executor is caught
- cached_kinit_executor_mock.reset_mock()
- cached_kinit_executor_mock.side_effect = Exception("Invalid command")
- try:
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "security_status",
- config_file="secured.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- except Exception:
- pass
- # Testing with a security_params which doesn't contain hdfs-site
- empty_security_params = {
- 'core-site': {
- 'hadoop.security.authentication': 'kerberos'
- }
- }
- cached_kinit_executor_mock.reset_mock()
- get_params_mock.reset_mock()
- put_structured_out_mock.reset_mock()
- get_params_mock.return_value = empty_security_params
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "security_status",
- config_file="secured.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})
- # Testing with non-empty result_issues
- result_issues_with_params = {
- 'hdfs-site': "Something bad happened"
- }
- validate_security_config_mock.reset_mock()
- get_params_mock.reset_mock()
- validate_security_config_mock.return_value = result_issues_with_params
- get_params_mock.return_value = security_params
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "security_status",
- config_file="secured.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
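- # Note on assert_called_with vs. called_with in the assertions above: Mock
- # fabricates attributes on access, so a misspelled assertion method returns
- # a fresh child mock and silently "passes". A minimal sketch:
- from mock import MagicMock
- m = MagicMock()
- m('real', 'args')
- m.called_with('anything')             # no-op: just creates a child mock
- m.assert_called_with('real', 'args')  # this one actually verifies the call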
- @patch("utils.get_namenode_states")
- def test_upgrade_restart(self, get_namenode_states_mock):
- # Executing with nn_ru_lzo exercises the code path that installs lzo, which
- # used to fail in the RU case. See hdfs.py and the lzo_enabled check in it.
- # Simply executing the script is enough to verify the fix.
- active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
- standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
- unknown_namenodes = []
- get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "restart",
- config_file = "nn_ru_lzo.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES)
- unknown_namenodes = active_namenodes
- active_namenodes = []
- get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "restart",
- config_file = "nn_ru_lzo.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES)
- self.assertTrue(len(Script.structuredOut) > 0)
- self.assertTrue("upgrade_type" in Script.structuredOut)
- self.assertTrue("direction" in Script.structuredOut)
- self.assertEquals("rolling_upgrade", Script.structuredOut["upgrade_type"])
- self.assertEquals("UPGRADE", Script.structuredOut["direction"])
- @patch("utils.get_namenode_states")
- def test_upgrade_restart_eu(self, get_namenode_states_mock):
- active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
- standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
- unknown_namenodes = []
- mocks_dict = {}
- get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "restart",
- config_file = "nn_eu_standby.json",
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0, None, ''), (0, None)],
- mocks_dict=mocks_dict)
-
- calls = mocks_dict['call'].call_args_list
- self.assertTrue(len(calls) >= 1)
- # call_args_list entries are call(args, kwargs); the command string is the first positional arg
- self.assertTrue(calls[0][0][0].startswith("conf-select create-conf-dir --package hadoop --stack-version 2.3.2.0-2844 --conf-version 0"))
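- # Shape of call_args_list entries, which the startswith check above indexes
- # into -- each entry is call(args, kwargs). A minimal sketch:
- from mock import MagicMock, call
- mm = MagicMock()
- mm('conf-select create-conf-dir', logoutput=True)
- entry = mm.call_args_list[0]
- assert entry == call('conf-select create-conf-dir', logoutput=True)
- assert entry[0] == ('conf-select create-conf-dir',)  # positional args tuple
- assert entry[1] == {'logoutput': True}               # keyword args dict
- assert entry[0][0].startswith('conf-select')         # the command string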
- @patch("hdfs_namenode.is_active_namenode")
- @patch("resource_management.libraries.functions.setup_ranger_plugin_xml.setup_ranger_plugin")
- @patch("utils.get_namenode_states")
- def test_upgrade_restart_eu_with_ranger(self, get_namenode_states_mock, setup_ranger_plugin_mock, is_active_nn_mock):
- is_active_nn_mock.return_value = True
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_eu.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- version = '2.3.4.0-1111'
- json_content['commandParams']['version'] = version
- active_namenodes = [('nn1', 'c6401.ambari.apache.org:50070')]
- standby_namenodes = [('nn2', 'c6402.ambari.apache.org:50070')]
- unknown_namenodes = []
- mocks_dict = {}
- get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- command_args=["nonrolling"],
- config_dict = json_content,
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0, None, ''), (0, None)],
- mocks_dict=mocks_dict)
- self.assertTrue(setup_ranger_plugin_mock.called)
- self.assertResourceCalledByIndex(7, 'Execute',
- ('mv', '/usr/hdp/2.3.4.0-1111/hadoop/conf/set-hdfs-plugin-env.sh', '/usr/hdp/2.3.4.0-1111/hadoop/conf/set-hdfs-plugin-env.sh.bak'),
- only_if='test -f /usr/hdp/2.3.4.0-1111/hadoop/conf/set-hdfs-plugin-env.sh',
- sudo=True)
- def test_pre_upgrade_restart(self):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- version = '2.2.1.0-3242'
- json_content['commandParams']['version'] = version
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "pre_upgrade_restart",
- config_dict = json_content,
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES)
- self.assertResourceCalled('Execute',
- ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-namenode', version), sudo=True)
- @patch("resource_management.core.shell.call")
- def test_pre_upgrade_restart_23(self, call_mock):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- version = '2.3.0.0-1234'
- json_content['commandParams']['version'] = version
- mocks_dict = {}
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "pre_upgrade_restart",
- config_dict = json_content,
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0, None), (0, None)],
- mocks_dict = mocks_dict)
- self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-namenode', version), sudo=True)
- self.assertNoMoreResources()
- def test_post_upgrade_restart(self):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "post_upgrade_restart",
- config_dict = json_content,
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES)
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -report -live',
- user='hdfs',
- tries=60,
- try_sleep=10
- )
- self.assertNoMoreResources()
- def test_post_upgrade_ha_restart(self):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/ha_default.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "post_upgrade_restart",
- config_dict = json_content,
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES)
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -report -live',
- user='hdfs',
- tries=60,
- try_sleep=10
- )
- self.assertNoMoreResources()
- def test_prepare_rolling_upgrade__upgrade(self):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/secured.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- json_content['commandParams']['upgrade_direction'] = 'upgrade'
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "prepare_rolling_upgrade",
- config_dict = json_content,
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0, "Safe mode is OFF in c6401.ambari.apache.org")])
-
- self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
- logoutput = True, user = 'hdfs')
-
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade prepare',
- logoutput = True, user = 'hdfs')
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade query',
- logoutput = True, user = 'hdfs')
-
- self.assertNoMoreResources()
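- # The call_mocks tuple above stands in for the safemode probe that
- # prepare_rolling_upgrade runs before issuing -rollingUpgrade prepare; a
- # minimal sketch of that kind of output check (the helper name is hypothetical):
- def safemode_is_off(dfsadmin_output):
-     return 'Safe mode is OFF' in dfsadmin_output
- assert safemode_is_off("Safe mode is OFF in c6401.ambari.apache.org")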
- def test_prepare_rolling_upgrade__upgrade_ha(self):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/ha_secured.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- json_content['commandParams']['upgrade_direction'] = 'upgrade'
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "prepare_rolling_upgrade",
- config_dict = json_content,
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0, "Safe mode is OFF in c6401.ambari.apache.org")])
- self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
- logoutput = True, user = 'hdfs')
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade prepare',
- logoutput = True, user = 'hdfs')
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade query',
- logoutput = True, user = 'hdfs')
- self.assertNoMoreResources()
-
- @patch.object(shell, "call")
- def test_prepare_rolling_upgrade__downgrade(self, shell_call_mock):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/secured.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- json_content['commandParams']['upgrade_direction'] = 'downgrade'
- # Mock safemode_check call
- shell_call_mock.return_value = 0, "Safe mode is OFF in c6401.ambari.apache.org"
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "prepare_rolling_upgrade",
- config_dict = json_content,
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES)
-
- self.assertResourceCalled('Execute',
- '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
- logoutput = True, user = 'hdfs')
-
- self.assertNoMoreResources()
- def test_finalize_rolling_upgrade(self):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "finalize_rolling_upgrade",
- config_dict = json_content,
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES)
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade query',
- logoutput = True,
- user = 'hdfs',
- )
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade finalize',
- logoutput = True,
- user = 'hdfs',
- )
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -rollingUpgrade query',
- logoutput = True,
- user = 'hdfs',
- )
- self.assertNoMoreResources()
- def test_finalize_ha_rolling_upgrade(self):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/ha_default.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "finalize_rolling_upgrade",
- config_dict = json_content,
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES)
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade query',
- logoutput = True,
- user = 'hdfs',
- )
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade finalize',
- logoutput = True,
- user = 'hdfs',
- )
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -fs hdfs://ns1 -rollingUpgrade query',
- logoutput = True,
- user = 'hdfs',
- )
- self.assertNoMoreResources()
- @patch.object(shell, "call")
- def test_pre_upgrade_restart_21_and_lower_params(self, call_mock):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- json_content['hostLevelParams']['stack_name'] = 'HDP'
- json_content['hostLevelParams']['stack_version'] = '2.0'
- mocks_dict = {}
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "pre_upgrade_restart",
- config_dict = json_content,
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
- mocks_dict = mocks_dict)
- import sys
- self.assertEquals("/etc/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
- self.assertEquals("/usr/lib/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
- self.assertEquals("/usr/bin", sys.modules["params"].hadoop_bin_dir)
- self.assertEquals("/usr/lib/hadoop/sbin", sys.modules["params"].hadoop_bin)
- @patch.object(shell, "call")
- def test_pre_upgrade_restart_22_params(self, call_mock):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- version = '2.2.0.0-1234'
- del json_content['commandParams']['version']
- json_content['hostLevelParams']['stack_name'] = 'HDP'
- json_content['hostLevelParams']['stack_version'] = '2.2'
- json_content['commandParams']['version'] = version
- mocks_dict = {}
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "pre_upgrade_restart",
- config_dict = json_content,
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
- mocks_dict = mocks_dict)
- import sys
- self.assertEquals("/usr/hdp/current/hadoop-client/conf", sys.modules["params"].hadoop_conf_dir)
- self.assertEquals("/usr/hdp/{0}/hadoop/libexec".format(version), sys.modules["params"].hadoop_libexec_dir)
- self.assertEquals("/usr/hdp/{0}/hadoop/bin".format(version), sys.modules["params"].hadoop_bin_dir)
- self.assertEquals("/usr/hdp/{0}/hadoop/sbin".format(version), sys.modules["params"].hadoop_bin)
- @patch.object(shell, "call")
- def test_pre_upgrade_restart_23_params(self, call_mock):
- import itertools
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- version = '2.3.0.0-1234'
- json_content['commandParams']['version'] = version
- json_content['commandParams']['upgrade_direction'] = 'upgrade'
- json_content['hostLevelParams']['stack_name'] = 'HDP'
- json_content['hostLevelParams']['stack_version'] = '2.3'
- mocks_dict = {}
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "pre_upgrade_restart",
- config_dict = json_content,
- stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = itertools.cycle([(0, None)]),
- mocks_dict = mocks_dict)
- import sys
- self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
- self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
- self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/bin", sys.modules["params"].hadoop_bin_dir)
- self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/sbin", sys.modules["params"].hadoop_bin)
- class Popen_Mock:
- # callable stand-in for subprocess.Popen: each call returns a MagicMock whose
- # stdout replays the canned balancer progress lines below, ending with ''
- return_value = 1
- lines = ['Time Stamp Iteration# Bytes Already Moved Bytes Left To Move Bytes Being Moved\n',
- 'Jul 28, 2014 5:01:49 PM 0 0 B 5.74 GB 9.79 GB\n',
- 'Jul 28, 2014 5:03:00 PM 1 0 B 5.58 GB 9.79 GB\n',
- '']
- def __call__(self, *args,**kwargs):
- popen = MagicMock()
- popen.returncode = Popen_Mock.return_value
- popen.stdout.readline = MagicMock(side_effect = Popen_Mock.lines)
- return popen
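- # A minimal sketch of how a callable class like Popen_Mock is typically wired
- # in (the exact patch target used by these tests is an assumption):
- from mock import patch
- with patch("subprocess.Popen", Popen_Mock()):
-     import subprocess
-     proc = subprocess.Popen(['hdfs', 'balancer'])
-     header = proc.stdout.readline()  # first canned line: the column header
-     assert proc.returncode == Popen_Mock.return_value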
|