- #!/usr/bin/env python
- '''
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- '''
- from ambari_commons import OSCheck
- import json
- import os
- import tempfile
- from stacks.utils.RMFTestCase import *
- from mock.mock import MagicMock, patch
- import resource_management
- from resource_management.core import shell
- from resource_management.core.exceptions import Fail
- class TestNamenode(RMFTestCase):
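- '''
- RMF test cases for the NameNode script (scripts/namenode.py) in the HDFS
- common-services package, executed against stack version 2.0.6.
- '''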
- COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
- STACK_VERSION = "2.0.6"
- def test_configure_default(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "configure",
- config_file = "default.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assert_configure_default()
- self.assertNoMoreResources()
- def test_start_default_alt_fs(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file = "altfs_plus_hdfs.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(5,"")],
- )
- self.assert_configure_default()
- self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
- self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
- path = ['/usr/bin'],
- user = 'hdfs',
- )
- self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
- recursive = True,
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- recursive = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- recursive = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', 'hdfs --config /etc/hadoop/conf dfsadmin -fs hdfs://c6405.ambari.apache.org:8020 -safemode leave',
- path = ['/usr/bin'],
- tries = 10,
- try_sleep = 10,
- user = 'hdfs',
- )
- self.assertResourceCalled('Execute', "hadoop dfsadmin -fs hdfs://c6405.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
- path = ['/usr/bin'],
- tries = 40,
- only_if = None,
- user = 'hdfs',
- try_sleep = 10,
- )
- self.assertResourceCalled('HdfsDirectory', '/tmp',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hdfs',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0770,
- owner = 'ambari-qa',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
- only_if = None,
- )
- self.assertNoMoreResources()
- def test_install_default(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "install",
- config_file = "default_no_install.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- try_install=True
- )
- self.assert_configure_default()
- self.assertNoMoreResources()
- def test_start_default(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file = "default.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(5,"")],
- )
- self.assert_configure_default()
- self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
- self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
- path = ['/usr/bin'],
- user = 'hdfs',
- )
- self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
- recursive = True,
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- recursive = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- recursive = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', 'hdfs --config /etc/hadoop/conf dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode leave',
- path = ['/usr/bin'],
- tries = 10,
- try_sleep = 10,
- user = 'hdfs',
- )
- self.assertResourceCalled('Execute', "hadoop dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
- path = ['/usr/bin'],
- tries = 40,
- only_if = None,
- user = 'hdfs',
- try_sleep = 10,
- )
- self.assertResourceCalled('HdfsDirectory', '/tmp',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hdfs',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0770,
- owner = 'ambari-qa',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
- only_if = None,
- )
- self.assertNoMoreResources()
- def test_stop_default(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "stop",
- config_file = "default.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = None,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- )
- self.assertNoMoreResources()
- def test_configure_secured(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "configure",
- config_file = "secured.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assert_configure_secured()
- self.assertNoMoreResources()
- def test_start_secured(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file = "secured.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(5,"")],
- )
- self.assert_configure_secured()
- self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
- self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
- path = ['/usr/bin'],
- user = 'hdfs',
- )
- self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
- recursive = True,
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- recursive = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- recursive = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
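- # with security enabled, the script kinits as the hdfs user before the safemode checks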
- self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
- user='hdfs',
- )
- self.assertResourceCalled('Execute', 'hdfs --config /etc/hadoop/conf dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode leave',
- path = ['/usr/bin'],
- tries = 10,
- try_sleep = 10,
- user = 'hdfs',
- )
- self.assertResourceCalled('Execute', "hadoop dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
- path = ['/usr/bin'],
- tries = 40,
- only_if = None,
- user = 'hdfs',
- try_sleep = 10,
- )
- self.assertResourceCalled('HdfsDirectory', '/tmp',
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hdfs',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0770,
- owner = 'ambari-qa',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
- only_if = None,
- )
- self.assertNoMoreResources()
- def test_stop_secured(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "stop",
- config_file = "secured.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = None,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- )
- self.assertNoMoreResources()
- def test_start_ha_default(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file = "ha_default.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assert_configure_default()
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- recursive = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- recursive = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', "hadoop dfsadmin -fs hdfs://ns1 -safemode get | grep 'Safe mode is OFF'",
- path = ['/usr/bin'],
- tries = 40,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- user = 'hdfs',
- try_sleep = 10,
- )
- self.assertResourceCalled('HdfsDirectory', '/tmp',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hdfs',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0770,
- owner = 'ambari-qa',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- )
- self.assertNoMoreResources()
- def test_start_ha_secured(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file = "ha_secured.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assert_configure_secured()
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- recursive = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- recursive = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
- user = 'hdfs',
- )
- self.assertResourceCalled('Execute', "hadoop dfsadmin -fs hdfs://ns1 -safemode get | grep 'Safe mode is OFF'",
- path = ['/usr/bin'],
- tries = 40,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- user = 'hdfs',
- try_sleep = 10,
- )
- self.assertResourceCalled('HdfsDirectory', '/tmp',
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hdfs',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0770,
- owner = 'ambari-qa',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- )
- self.assertNoMoreResources()
- # tests the namenode start command when NameNode HA is enabled and the HA cluster
- # is bootstrapped directly (e.g. from a Blueprint), rather than via the UI Wizard
- def test_start_ha_bootstrap_active_from_blueprint(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file="ha_bootstrap_active_node.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assert_configure_default()
- # verify that the active namenode was formatted
- self.assertResourceCalled('Execute', 'ls /hadoop/hdfs/namenode | wc -l | grep -q ^0$',)
- self.assertResourceCalled('Execute', 'yes Y | hdfs --config /etc/hadoop/conf namenode -format',
- path = ['/usr/bin'],
- user = 'hdfs',
- )
- self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode/namenode-formatted/',
- recursive = True,
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- recursive = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- recursive = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', "hadoop dfsadmin -fs hdfs://ns1 -safemode get | grep 'Safe mode is OFF'",
- path = ['/usr/bin'],
- tries = 40,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- user = 'hdfs',
- try_sleep = 10,
- )
- self.assertResourceCalled('HdfsDirectory', '/tmp',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hdfs',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0770,
- owner = 'ambari-qa',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
- )
- self.assertNoMoreResources()
- # tests the namenode start command when NameNode HA is enabled and the HA cluster
- # is bootstrapped directly from a Blueprint, rather than via the UI Wizard;
- # this test verifies the startup of a "standby" namenode
- @patch.object(shell, "call", new=MagicMock(return_value=(5,"")))
- def test_start_ha_bootstrap_standby_from_blueprint(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "start",
- config_file="ha_bootstrap_standby_node.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assert_configure_default()
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Directory', '/var/run/hadoop',
- owner = 'hdfs',
- group = 'hadoop',
- mode = 0755
- )
- # TODO: shell.call() is patched to return status code '5' (i.e. already bootstrapped),
- # so the standby bootstrap is skipped here. Update this test case to verify that the
- # standby case is detected and that the bootstrap command runs before the namenode launches
- self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
- owner = 'hdfs',
- recursive = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hadoop/hdfs',
- owner = 'hdfs',
- recursive = True,
- )
- self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
- action = ['delete'],
- not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
- environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
- not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
- )
- self.assertResourceCalled('Execute', "hadoop dfsadmin -fs hdfs://ns1 -safemode get | grep 'Safe mode is OFF'",
- path = ['/usr/bin'],
- tries = 40,
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
- user = 'hdfs',
- try_sleep = 10,
- )
- self.assertResourceCalled('HdfsDirectory', '/tmp',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hdfs',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0770,
- owner = 'ambari-qa',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
- only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
- )
- self.assertNoMoreResources()
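- # decommission should regenerate the exclude file and refresh the namenode's node list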
- def test_decommission_default(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "decommission",
- config_file = "default.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
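- # the expected kinit command is empty because security is disabled in this config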
- self.assertResourceCalled('Execute', '', user = 'hdfs')
- self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -refreshNodes',
- user = 'hdfs',
- conf_dir = '/etc/hadoop/conf',
- bin_dir = '/usr/bin',
- kinit_override = True)
- self.assertNoMoreResources()
- def test_decommission_update_exclude_file_only(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "decommission",
- config_file = "default_update_exclude_file_only.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertNoMoreResources()
- def test_decommission_ha_default(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "decommission",
- config_file = "ha_default.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Execute', '', user = 'hdfs')
- self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
- user = 'hdfs',
- conf_dir = '/etc/hadoop/conf',
- bin_dir = '/usr/bin',
- kinit_override = True)
- self.assertNoMoreResources()
- def test_decommission_secured(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "decommission",
- config_file = "secured.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
- owner = 'hdfs',
- content = Template('exclude_hosts_list.j2'),
- group = 'hadoop',
- )
- self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM;',
- user = 'hdfs',
- )
- self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -refreshNodes',
- bin_dir = '/usr/bin',
- conf_dir = '/etc/hadoop/conf',
- kinit_override = True,
- user = 'hdfs',
- )
- self.assertNoMoreResources()
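- # shared assertions for the resources produced by the "configure" command with an unsecured config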
- def assert_configure_default(self):
- self.assertResourceCalled('Directory', '/etc/security/limits.d',
- owner = 'root',
- group = 'root',
- recursive = True,
- )
- self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
- content = Template('hdfs.conf.j2'),
- owner = 'root',
- group = 'root',
- mode = 0644,
- )
- self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
- owner = 'hdfs',
- group = 'hadoop',
- conf_dir = '/etc/hadoop/conf',
- configurations = self.getConfig()['configurations']['hdfs-site'],
- configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
- )
- self.assertResourceCalled('XmlConfig', 'core-site.xml',
- owner = 'hdfs',
- group = 'hadoop',
- conf_dir = '/etc/hadoop/conf',
- configurations = self.getConfig()['configurations']['core-site'],
- configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
- mode = 0644
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
- content = Template('slaves.j2'),
- owner = 'hdfs',
- )
- self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
- owner = 'hdfs',
- group = 'hadoop',
- recursive = True,
- mode = 0755,
- cd_access='a'
- )
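- # shared assertions for the "configure" command with a secured (Kerberos) config; note that
- # /etc/hadoop/conf/slaves is owned by root here rather than by hdfs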
- def assert_configure_secured(self):
- self.assertResourceCalled('Directory', '/etc/security/limits.d',
- owner = 'root',
- group = 'root',
- recursive = True,
- )
- self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
- content = Template('hdfs.conf.j2'),
- owner = 'root',
- group = 'root',
- mode = 0644,
- )
- self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
- owner = 'hdfs',
- group = 'hadoop',
- conf_dir = '/etc/hadoop/conf',
- configurations = self.getConfig()['configurations']['hdfs-site'],
- configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
- )
- self.assertResourceCalled('XmlConfig', 'core-site.xml',
- owner = 'hdfs',
- group = 'hadoop',
- conf_dir = '/etc/hadoop/conf',
- configurations = self.getConfig()['configurations']['core-site'],
- configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
- mode = 0644
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
- content = Template('slaves.j2'),
- owner = 'root',
- )
- self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
- owner = 'hdfs',
- group = 'hadoop',
- recursive = True,
- mode = 0755,
- cd_access='a'
- )
- @patch("resource_management.libraries.script.Script.put_structured_out")
- def test_rebalance_hdfs(self, pso):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "rebalancehdfs",
- config_file = "rebalancehdfs_default.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf balancer -threshold -1'",
- logoutput = False,
- on_new_line = FunctionMock('handle_new_line'),
- )
- self.assertNoMoreResources()
- @patch("resource_management.libraries.script.Script.put_structured_out")
- @patch("os.system")
- def test_rebalance_secured_hdfs(self, pso, system_mock):
- system_mock.return_value = -1
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "rebalancehdfs",
- config_file = "rebalancehdfs_secured.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
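- # build the expected kinit and balancer commands; the credential cache lives in the system
- # temp dir and its hash suffix must match the value the script computes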
- tempdir = tempfile.gettempdir()
- ccache_path = os.path.join(tempdir, "hdfs_rebalance_cc_7add60ca651f1bd1ed909a6668937ba9")
- kinit_cmd = "/usr/bin/kinit -c {0} -kt /etc/security/keytabs/hdfs.headless.keytab hdfs@EXAMPLE.COM".format(ccache_path)
- rebalance_cmd = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin KRB5CCNAME={0} ; hdfs --config /etc/hadoop/conf balancer -threshold -1'".format(ccache_path)
- self.assertResourceCalled('Execute', kinit_cmd,
- user = 'hdfs',
- )
- self.assertResourceCalled('Execute', rebalance_cmd,
- logoutput = False,
- on_new_line = FunctionMock('handle_new_line'),
- )
- self.assertNoMoreResources()
- @patch("os.path.isfile")
- def test_ranger_installed_missing_file(self, isfile_mock):
- """
- Tests that when Ranger is enabled for HDFS, that an exception is thrown
- if there is no install.properties found
- :return:
- """
- isfile_mock.return_value = False
- try:
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode", command = "start", config_file = "ranger-namenode-start.json",
- hdp_stack_version = self.STACK_VERSION, target = RMFTestCase.TARGET_COMMON_SERVICES )
- self.fail("Expected a failure since the ranger install.properties was missing")
- except Fail:
- pass
- self.assertTrue(isfile_mock.called)
- @patch("resource_management.libraries.functions.security_commons.build_expectations")
- @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
- @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
- @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
- @patch("resource_management.libraries.script.Script.put_structured_out")
- def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
- # Test that the function works when it is called with correct parameters
- security_params = {
- 'core-site': {
- 'hadoop.security.authentication': 'kerberos'
- },
- 'hdfs-site': {
- 'dfs.namenode.keytab.file': 'path/to/namenode/keytab/file',
- 'dfs.namenode.kerberos.principal': 'namenode_principal'
- }
- }
- props_value_check = None
- props_empty_check = ['dfs.namenode.kerberos.internal.spnego.principal',
- 'dfs.namenode.keytab.file',
- 'dfs.namenode.kerberos.principal']
- props_read_check = ['dfs.namenode.keytab.file']
- result_issues = []
- get_params_mock.return_value = security_params
- validate_security_config_mock.return_value = result_issues
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "security_status",
- config_file="secured.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- build_exp_mock.assert_called_with('hdfs-site', props_value_check, props_empty_check, props_read_check)
- put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
- cached_kinit_executor_mock.assert_called_with('/usr/bin/kinit',
- self.config_dict['configurations']['hadoop-env']['hdfs_user'],
- security_params['hdfs-site']['dfs.namenode.keytab.file'],
- security_params['hdfs-site']['dfs.namenode.kerberos.principal'],
- self.config_dict['hostname'],
- '/tmp')
- # Testing when hadoop.security.authentication is simple
- security_params['core-site']['hadoop.security.authentication'] = 'simple'
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "security_status",
- config_file="secured.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
- security_params['core-site']['hadoop.security.authentication'] = 'kerberos'
- # Testing that the exception thrown by cached_kinit_executor is caught
- cached_kinit_executor_mock.reset_mock()
- cached_kinit_executor_mock.side_effect = Exception("Invalid command")
- try:
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "security_status",
- config_file="secured.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- except Exception:
- pass
- # Testing with a security_params dict which doesn't contain hdfs-site
- empty_security_params = {
- 'core-site': {
- 'hadoop.security.authentication': 'kerberos'
- }
- }
- cached_kinit_executor_mock.reset_mock()
- get_params_mock.reset_mock()
- put_structured_out_mock.reset_mock()
- get_params_mock.return_value = empty_security_params
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "security_status",
- config_file="secured.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
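- # the expected message below matches the script's structured output verbatim, typo included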
- put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})
- # Testing with not empty result_issues
- result_issues_with_params = {
- 'hdfs-site': "Something bad happened"
- }
- validate_security_config_mock.reset_mock()
- get_params_mock.reset_mock()
- validate_security_config_mock.return_value = result_issues_with_params
- get_params_mock.return_value = security_params
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "security_status",
- config_file="secured.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
- def test_upgrade_restart(self):
- # Executing with the nn_ru_lzo config exercises the code path that installs lzo, which
- # used to fail in the RU (rolling upgrade) case; see hdfs.py and its lzo_enabled check.
- # Simply executing the script is enough to verify the fix
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "restart",
- config_file = "nn_ru_lzo.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES)
- def test_pre_rolling_restart(self):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- version = '2.2.1.0-3242'
- json_content['commandParams']['version'] = version
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "pre_rolling_restart",
- config_dict = json_content,
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES)
- self.assertResourceCalled('Execute',
- 'hdp-select set hadoop-hdfs-namenode %s' % version)
- self.assertNoMoreResources()
- @patch("resource_management.core.shell.call")
- def test_pre_rolling_restart_23(self, call_mock):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- version = '2.3.0.0-1234'
- json_content['commandParams']['version'] = version
- mocks_dict = {}
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "pre_rolling_restart",
- config_dict = json_content,
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0, None), (0, None)],
- mocks_dict = mocks_dict)
- self.assertResourceCalled('Execute', 'hdp-select set hadoop-hdfs-namenode %s' % version)
- self.assertNoMoreResources()
- self.assertEquals(2, mocks_dict['call'].call_count)
- self.assertEquals(
- "conf-select create-conf-dir --package hadoop --stack-version 2.3.0.0-1234 --conf-version 0",
- mocks_dict['call'].call_args_list[0][0][0])
- self.assertEquals(
- "conf-select set-conf-dir --package hadoop --stack-version 2.3.0.0-1234 --conf-version 0",
- mocks_dict['call'].call_args_list[1][0][0])
- def test_post_rolling_restart(self):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "post_rolling_restart",
- config_dict = json_content,
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES)
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -report -live',
- user = 'hdfs',
- )
- self.assertNoMoreResources()
- def test_prepare_rolling_upgrade__upgrade(self):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/secured.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- json_content['commandParams']['upgrade_direction'] = 'upgrade'
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "prepare_rolling_upgrade",
- config_dict = json_content,
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0, "Safe mode is OFF in c6401.ambari.apache.org")])
- self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',)
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade prepare',
- logoutput = True,
- user = 'hdfs',
- )
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade query',
- logoutput = True,
- user = 'hdfs',
- )
- self.assertNoMoreResources()
-
- @patch.object(shell, "call")
- def test_prepare_rolling_upgrade__downgrade(self, shell_call_mock):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/secured.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- json_content['commandParams']['upgrade_direction'] = 'downgrade'
- # Mock safemode_check call
- shell_call_mock.return_value = 0, "Safe mode is OFF in c6401.ambari.apache.org"
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "prepare_rolling_upgrade",
- config_dict = json_content,
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES)
- self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',)
- self.assertNoMoreResources()
- def test_finalize_rolling_upgrade(self):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "finalize_rolling_upgrade",
- config_dict = json_content,
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES)
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade query',
- logoutput = True,
- user = 'hdfs',
- )
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade finalize',
- logoutput = True,
- user = 'hdfs',
- )
- self.assertResourceCalled('Execute', 'hdfs dfsadmin -rollingUpgrade query',
- logoutput = True,
- user = 'hdfs',
- )
- self.assertNoMoreResources()
- @patch("resource_management.core.shell.call")
- def test_pre_rolling_restart_21_and_lower_params(self, call_mock):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- json_content['hostLevelParams']['stack_name'] = 'HDP'
- json_content['hostLevelParams']['stack_version'] = '2.0'
- mocks_dict = {}
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "pre_rolling_restart",
- config_dict = json_content,
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
- mocks_dict = mocks_dict)
- import sys
- self.assertEquals("/etc/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
- self.assertEquals("/usr/lib/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
- self.assertEquals("/usr/bin", sys.modules["params"].hadoop_bin_dir)
- self.assertEquals("/usr/lib/hadoop/sbin", sys.modules["params"].hadoop_bin)
- @patch("resource_management.core.shell.call")
- def test_pre_rolling_restart_22_params(self, call_mock):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- del json_content['commandParams']['version']
- json_content['hostLevelParams']['stack_name'] = 'HDP'
- json_content['hostLevelParams']['stack_version'] = '2.2'
- mocks_dict = {}
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "pre_rolling_restart",
- config_dict = json_content,
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
- mocks_dict = mocks_dict)
- import sys
- self.assertEquals("/usr/hdp/current/hadoop-client/conf", sys.modules["params"].hadoop_conf_dir)
- self.assertEquals("/usr/hdp/current/hadoop-client/libexec", sys.modules["params"].hadoop_libexec_dir)
- self.assertEquals("/usr/hdp/current/hadoop-client/bin", sys.modules["params"].hadoop_bin_dir)
- self.assertEquals("/usr/hdp/current/hadoop-client/sbin", sys.modules["params"].hadoop_bin)
- @patch("resource_management.core.shell.call")
- def test_pre_rolling_restart_23_params(self, call_mock):
- config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
- with open(config_file, "r") as f:
- json_content = json.load(f)
- version = '2.3.0.0-1234'
- json_content['commandParams']['version'] = version
- json_content['commandParams']['upgrade_direction'] = 'upgrade'
- json_content['hostLevelParams']['stack_name'] = 'HDP'
- json_content['hostLevelParams']['stack_version'] = '2.3'
- mocks_dict = {}
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
- classname = "NameNode",
- command = "pre_rolling_restart",
- config_dict = json_content,
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
- mocks_dict = mocks_dict)
- import sys
- self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
- self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
- self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/bin", sys.modules["params"].hadoop_bin_dir)
- self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/sbin", sys.modules["params"].hadoop_bin)
- class Popen_Mock:
- return_value = 1
- lines = ['Time Stamp Iteration# Bytes Already Moved Bytes Left To Move Bytes Being Moved\n',
- 'Jul 28, 2014 5:01:49 PM 0 0 B 5.74 GB 9.79 GB\n',
- 'Jul 28, 2014 5:03:00 PM 1 0 B 5.58 GB 9.79 GB\n',
- '']
- def __call__(self, *args,**kwargs):
- popen = MagicMock()
- popen.returncode = Popen_Mock.return_value
- popen.stdout.readline = MagicMock(side_effect = Popen_Mock.lines)
- return popen