@@ -22,16 +22,20 @@ import socket
 import subprocess

 from mock.mock import MagicMock, patch
+from resource_management.libraries.functions import version
 from resource_management.core import shell
-from resource_management.libraries.functions import dynamic_variable_interpretation
+from resource_management.libraries.script.script import Script
 from stacks.utils.RMFTestCase import *
+from resource_management.libraries import functions

+
+@patch.object(functions, "get_hdp_version", new = MagicMock(return_value="2.0.0.0-1234"))
 @patch("resource_management.libraries.functions.check_thrift_port_sasl", new=MagicMock())
 class TestHiveServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
   UPGRADE_STACK_VERSION = "2.2"
-
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -44,7 +48,7 @@ class TestHiveServer(RMFTestCase):
     self.assertNoMoreResources()

   @patch("socket.socket")
-  @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_start_default(self, socket_mock):
     s = socket_mock.return_value

@@ -83,9 +87,8 @@ class TestHiveServer(RMFTestCase):
     )
     self.assertNoMoreResources()

-
-  @patch.object(dynamic_variable_interpretation, "_get_tar_source_and_dest_folder")
-  def test_start_default_no_copy(self, get_tar_mock):
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+  def test_start_default_no_copy(self):

     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -95,7 +98,6 @@ class TestHiveServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )

-    get_tar_mock.return_value = ("a", "b")
     self.assert_configure_default()

     self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
@@ -118,10 +120,9 @@ class TestHiveServer(RMFTestCase):
         timeout = 30,
     )
     self.assertNoMoreResources()
-    self.assertFalse(get_tar_mock.called)

-  @patch.object(dynamic_variable_interpretation, "_get_tar_source_and_dest_folder")
-  def test_start_default_alt_tmp(self, get_tar_mock):
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+  def test_start_default_alt_tmp(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
                        command = "start",
@@ -130,7 +131,6 @@ class TestHiveServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )

-    get_tar_mock.return_value = ("a", "b")
     self.assert_configure_default(no_tmp=True)

     self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
@@ -153,11 +153,10 @@ class TestHiveServer(RMFTestCase):
         timeout = 30,
     )
     self.assertNoMoreResources()
-    self.assertFalse(get_tar_mock.called)


-  @patch.object(dynamic_variable_interpretation, "_get_tar_source_and_dest_folder")
-  def test_start_default_alt_nn_ha_tmp(self, get_tar_mock):
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+  def test_start_default_alt_nn_ha_tmp(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
                        command = "start",
@@ -166,7 +165,6 @@ class TestHiveServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )

-    get_tar_mock.return_value = ("a", "b")
     self.assert_configure_default(no_tmp=True)

     self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
@@ -189,9 +187,8 @@ class TestHiveServer(RMFTestCase):
         timeout = 30,
     )
     self.assertNoMoreResources()
-    self.assertFalse(get_tar_mock.called)

-  @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_stop_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -217,7 +214,7 @@ class TestHiveServer(RMFTestCase):

     self.assertNoMoreResources()

-
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_configure_secured(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -231,6 +228,7 @@ class TestHiveServer(RMFTestCase):

   @patch("hive_service.check_fs_root")
   @patch("socket.socket")
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_start_secured(self, socket_mock, check_fs_root_mock):
     s = socket_mock.return_value

@@ -276,6 +274,7 @@ class TestHiveServer(RMFTestCase):


   @patch("socket.socket")
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
   def test_stop_secured(self, socket_mock):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
@@ -302,51 +301,89 @@ class TestHiveServer(RMFTestCase):
     self.assertNoMoreResources()

   def assert_configure_default(self, no_tmp = False):
-    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
-        security_enabled=False,
-        keytab=UnknownConfigurationMock(),
-        conf_dir='/etc/hadoop/conf',
-        hdfs_user='hdfs',
-        kinit_path_local='/usr/bin/kinit',
-        mode=0777,
-        owner='hive',
-        bin_dir='/usr/bin',
-        action=['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsDirectory', '/user/hive',
-        security_enabled=False,
-        keytab=UnknownConfigurationMock(),
-        conf_dir='/etc/hadoop/conf',
-        hdfs_user='hdfs',
-        kinit_path_local='/usr/bin/kinit',
-        mode=0700,
-        owner='hive',
-        bin_dir='/usr/bin',
-        action=['create_delayed'],
+    self.assertResourceCalled('HdfsResource', '/apps/webhcat',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hcat',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/hcat',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hcat',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/webhcat/hive.tar.gz',
+        security_enabled = False,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = UnknownConfigurationMock(),
+        source = '/usr/share/HDP-webhcat/hive.tar.gz',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['create_on_execute'],
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'file',
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hive',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/hive',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hive',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0700,
     )
-
     if not no_tmp:
-      self.assertResourceCalled('HdfsDirectory', '/custompath/tmp/hive',
-          security_enabled=False,
-          keytab=UnknownConfigurationMock(),
-          conf_dir='/etc/hadoop/conf',
-          hdfs_user='hdfs',
-          kinit_path_local='/usr/bin/kinit',
-          mode=0777,
-          owner='hive',
-          group='hdfs',
-          action=['create_delayed'],
-          bin_dir='/usr/bin',
+      self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
+          security_enabled = False,
+          hadoop_conf_dir = '/etc/hadoop/conf',
+          keytab = UnknownConfigurationMock(),
+          kinit_path_local = '/usr/bin/kinit',
+          user = 'hdfs',
+          owner = 'hive',
+          group = 'hdfs',
+          hadoop_bin_dir = '/usr/bin',
+          type = 'directory',
+          action = ['create_on_execute'],
+          mode = 0777,
       )
-
-    self.assertResourceCalled('HdfsDirectory', None,
-        security_enabled=False,
-        keytab=UnknownConfigurationMock(),
-        conf_dir='/etc/hadoop/conf',
-        hdfs_user='hdfs',
-        kinit_path_local='/usr/bin/kinit',
-        action=['create'],
-        bin_dir='/usr/bin',
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/etc/hive',
         mode=0755,
@@ -453,49 +490,88 @@ class TestHiveServer(RMFTestCase):


   def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
-        security_enabled=True,
-        keytab='/etc/security/keytabs/hdfs.headless.keytab',
-        conf_dir='/etc/hadoop/conf',
-        hdfs_user='hdfs',
-        kinit_path_local='/usr/bin/kinit',
-        mode=0777,
-        owner='hive',
-        bin_dir='/usr/bin',
-        action=['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsDirectory', '/user/hive',
-        security_enabled=True,
-        keytab='/etc/security/keytabs/hdfs.headless.keytab',
-        conf_dir='/etc/hadoop/conf',
-        hdfs_user='hdfs',
-        kinit_path_local='/usr/bin/kinit',
-        mode=0700,
-        owner='hive',
-        bin_dir='/usr/bin',
-        action=['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsDirectory', '/custompath/tmp/hive',
-        security_enabled=True,
-        keytab='/etc/security/keytabs/hdfs.headless.keytab',
-        conf_dir='/etc/hadoop/conf',
-        hdfs_user='hdfs',
-        kinit_path_local='/usr/bin/kinit',
-        mode=0777,
-        owner='hive',
-        group='hdfs',
-        action=['create_delayed'],
-        bin_dir='/usr/bin',
-    )
-
-    self.assertResourceCalled('HdfsDirectory', None,
-        security_enabled=True,
-        keytab='/etc/security/keytabs/hdfs.headless.keytab',
-        conf_dir='/etc/hadoop/conf',
-        hdfs_user='hdfs',
-        kinit_path_local='/usr/bin/kinit',
-        action=['create'],
-        bin_dir='/usr/bin',
+    self.assertResourceCalled('HdfsResource', '/apps/webhcat',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hcat',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/hcat',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hcat',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/webhcat/hive.tar.gz',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        source = '/usr/share/HDP-webhcat/hive.tar.gz',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['create_on_execute'],
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'file',
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hive',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/hive',
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hive',
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0700,
+    )
+    self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
+        security_enabled = True,
+        hadoop_conf_dir = '/etc/hadoop/conf',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'hive',
+        group = 'hdfs',
+        hadoop_bin_dir = '/usr/bin',
+        type = 'directory',
+        action = ['create_on_execute'],
+        mode = 0777,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = True,
+        hadoop_bin_dir = '/usr/bin',
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/etc/hive',
         mode=0755,
@@ -624,6 +700,7 @@ class TestHiveServer(RMFTestCase):
     self.assert_configure_default()


+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
   @patch("hive_server.HiveServer.pre_rolling_restart")
   @patch("hive_server.HiveServer.start")
   def test_stop_during_upgrade(self, hive_server_start_mock,
@@ -771,6 +848,7 @@ class TestHiveServer(RMFTestCase):
     )
     put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})

+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
   def test_pre_rolling_restart(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
@@ -785,9 +863,32 @@ class TestHiveServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
                               'hdp-select set hive-server2 %s' % version,)
+    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.0.0.0-1234/mapreduce//mapreduce.tar.gz',
+        security_enabled = False,
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        keytab = UnknownConfigurationMock(),
+        source = '/usr/hdp/current/hadoop-client/mapreduce.tar.gz',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['create_on_execute'],
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        type = 'file',
+        mode = 0444,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    )
     self.assertNoMoreResources()

   @patch("resource_management.core.shell.call")
+  @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
   def test_pre_rolling_restart_23(self, call_mock):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
@@ -807,6 +908,28 @@ class TestHiveServer(RMFTestCase):

     self.assertResourceCalled('Execute',
                               'hdp-select set hive-server2 %s' % version,)
+    self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.0.0.0-1234/mapreduce//mapreduce.tar.gz',
+        security_enabled = False,
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        keytab = UnknownConfigurationMock(),
+        source = '/usr/hdp/current/hadoop-client/mapreduce.tar.gz',
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['create_on_execute'],
+        group = 'hadoop',
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        type = 'file',
+        mode = 0444,
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    )
     self.assertNoMoreResources()

     self.assertEquals(2, mocks_dict['call'].call_count)