- #!/usr/bin/env python
- '''
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- '''
- import os
- from mock.mock import MagicMock, call, patch
- from stacks.utils.RMFTestCase import *
- import socket
- class TestHiveServer(RMFTestCase):
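- """Unit tests for the HiveServer component script (hive_server.py) of the HIVE service in the 2.0.6 stack."""
-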
- def test_configure_default(self):
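- """The configure command with the default (non-secured) config should only lay down the Hive configuration resources."""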
- self.executeScript("2.0.6/services/HIVE/package/scripts/hive_server.py",
- classname = "HiveServer",
- command = "configure",
- config_file="default.json"
- )
- self.assert_configure_default()
- self.assertNoMoreResources()
-
- @patch("hive_service.check_fs_root")
- @patch("socket.socket")
- def test_start_default(self, socket_mock, check_fs_root_mock):
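- """Start with the default config configures Hive, stages the Tez artifacts in HDFS, launches HiveServer2 via the start script and verifies the metastore DB connection; the port check opens and closes a socket."""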
- s = socket_mock.return_value
-
- self.executeScript("2.0.6/services/HIVE/package/scripts/hive_server.py",
- classname = "HiveServer",
- command = "start",
- config_file="default.json"
- )
- self.assert_configure_default()
- self.assertResourceCalled('HdfsDirectory', '/apps/tez/',
- action = ['create_delayed'],
- mode = 0755,
- owner = 'tez',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- bin_dir = '/usr/bin',
- kinit_path_local = "/usr/bin/kinit"
- )
- self.assertResourceCalled('HdfsDirectory', '/apps/tez/lib/',
- action = ['create_delayed'],
- mode = 0755,
- owner = 'tez',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- bin_dir = '/usr/bin',
- kinit_path_local = "/usr/bin/kinit"
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- bin_dir = '/usr/bin',
- action = ['create']
- )
- self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/tez*.jar',
- mode=0755,
- owner='tez',
- dest_dir='/apps/tez/',
- kinnit_if_needed='',
- hadoop_conf_dir='/etc/hadoop/conf',
- hdfs_user='hdfs'
- )
- self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/lib/*.jar',
- mode=0755,
- owner='tez',
- dest_dir='/apps/tez/lib/',
- kinnit_if_needed='',
- hadoop_conf_dir='/etc/hadoop/conf',
- hdfs_user='hdfs'
- )
- self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
- not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
- environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin",
- 'HADOOP_HOME' : '/usr'},
- user = 'hive'
- )
- self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
- path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
- )
- self.assertNoMoreResources()
- self.assertTrue(check_fs_root_mock.called)
- self.assertTrue(socket_mock.called)
- self.assertTrue(s.close.called)
-
- @patch("socket.socket")
- def test_stop_default(self, socket_mock):
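- """Stop kills the process recorded in the pid file and removes the file; no socket should be opened."""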
- self.executeScript("2.0.6/services/HIVE/package/scripts/hive_server.py",
- classname = "HiveServer",
- command = "stop",
- config_file="default.json"
- )
- self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive-server.pid',
- not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)'
- )
-
- self.assertNoMoreResources()
- self.assertFalse(socket_mock.called)
-
- def test_configure_secured(self):
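- """Same as the default configure test, but driven by the secured (Kerberos-enabled) configuration."""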
- self.executeScript("2.0.6/services/HIVE/package/scripts/hive_server.py",
- classname = "HiveServer",
- command = "configure",
- config_file="secured.json"
- )
- self.assert_configure_secured()
- self.assertNoMoreResources()
-
- @patch("hive_service.check_fs_root")
- @patch("socket.socket")
- def test_start_secured(self, socket_mock, check_fs_root_mock):
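- """Start on a secured cluster configures Hive, launches HiveServer2 and verifies the metastore DB connection; unlike the default case no Tez artifacts are staged here."""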
- s = socket_mock.return_value
- self.executeScript("2.0.6/services/HIVE/package/scripts/hive_server.py",
- classname = "HiveServer",
- command = "start",
- config_file="secured.json"
- )
- self.assert_configure_secured()
- self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
- not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
- environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin",
- 'HADOOP_HOME' : '/usr'},
- user = 'hive'
- )
- self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
- path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
- )
- self.assertNoMoreResources()
- self.assertTrue(check_fs_root_mock.called)
- self.assertTrue(socket_mock.called)
- self.assertTrue(s.close.called)
-
- @patch("socket.socket")
- def test_stop_secured(self, socket_mock):
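- """Stop on a secured cluster behaves the same as the default stop: kill via the pid file, no socket is opened."""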
- self.executeScript("2.0.6/services/HIVE/package/scripts/hive_server.py",
- classname = "HiveServer",
- command = "stop",
- config_file="secured.json"
- )
- self.assertResourceCalled('Execute', 'kill `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1 && rm -f /var/run/hive/hive-server.pid',
- not_if = '! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1)'
- )
-
- self.assertNoMoreResources()
- self.assertFalse(socket_mock.called)
-
- def assert_configure_default(self):
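- """Resources expected from configure with default.json: the warehouse and user directories in HDFS, the server and client config directories with their XML/env/log4j files, the MySQL connector copy, the DBConnectionVerification download, the HiveServer2 start script and the run/log/lib directories."""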
- self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0777,
- owner = 'hive',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/hive',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0700,
- owner = 'hive',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- bin_dir = '/usr/bin',
- action = ['create'],
- )
- self.assertResourceCalled('Directory', '/etc/hive/conf.server',
- owner = 'hive',
- group = 'hadoop',
- recursive = True,
- )
- self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
- group = 'hadoop',
- conf_dir = '/etc/hive/conf.server',
- mode = 0644,
- configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
- owner = 'hive',
- configurations = self.getConfig()['configurations']['mapred-site'],
- )
- self.assertResourceCalled('XmlConfig', 'hive-site.xml',
- group = 'hadoop',
- conf_dir = '/etc/hive/conf.server',
- mode = 0644,
- configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
- owner = 'hive',
- configurations = self.getConfig()['configurations']['hive-site'],
- )
- self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
- content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
- owner = 'hive',
- group = 'hadoop',
- )
- self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
- owner = 'hive',
- group = 'hadoop',
- )
- self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
- owner = 'hive',
- group = 'hadoop',
- )
- self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
- content = 'log4jproperties\nline2',
- owner = 'hive',
- group = 'hadoop',
- mode = 0644,
- )
- self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
- content = 'log4jproperties\nline2',
- owner = 'hive',
- group = 'hadoop',
- mode = 0644,
- )
- self.assertResourceCalled('Directory', '/etc/hive/conf',
- owner = 'hive',
- group = 'hadoop',
- recursive = True,
- )
- self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
- group = 'hadoop',
- conf_dir = '/etc/hive/conf',
- mode = 0644,
- configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
- owner = 'hive',
- configurations = self.getConfig()['configurations']['mapred-site'],
- )
- self.assertResourceCalled('XmlConfig', 'hive-site.xml',
- group = 'hadoop',
- conf_dir = '/etc/hive/conf',
- mode = 0644,
- configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
- owner = 'hive',
- configurations = self.getConfig()['configurations']['hive-site'],
- )
- self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
- content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
- owner = 'hive',
- group = 'hadoop',
- )
- self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
- owner = 'hive',
- group = 'hadoop',
- )
- self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
- owner = 'hive',
- group = 'hadoop',
- )
- self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
- content = 'log4jproperties\nline2',
- owner = 'hive',
- group = 'hadoop',
- mode = 0644,
- )
- self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
- content = 'log4jproperties\nline2',
- owner = 'hive',
- group = 'hadoop',
- mode = 0644,
- )
- self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
- creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
- path = ['/bin', '/usr/bin/'],
- environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
- not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
- )
- self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
- environment = {'no_proxy': 'c6401.ambari.apache.org'},
- not_if = '[ -f DBConnectionVerification.jar]',
- )
- self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
- content = Template('startHiveserver2.sh.j2'),
- mode = 0755,
- )
- self.assertResourceCalled('Directory', '/var/run/hive',
- owner = 'hive',
- group = 'hadoop',
- mode = 0755,
- recursive = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hive',
- owner = 'hive',
- group = 'hadoop',
- mode = 0755,
- recursive = True,
- )
- self.assertResourceCalled('Directory', '/var/lib/hive',
- owner = 'hive',
- group = 'hadoop',
- mode = 0755,
- recursive = True,
- )
-
- def assert_configure_secured(self):
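- """Same resource sequence as the default case, except that the HdfsDirectory calls run with security_enabled and the hdfs headless keytab."""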
- self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- bin_dir = '/usr/bin',
- mode = 0777,
- owner = 'hive',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/user/hive',
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0700,
- bin_dir = '/usr/bin',
- owner = 'hive',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = True,
- keytab = '/etc/security/keytabs/hdfs.headless.keytab',
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- bin_dir = '/usr/bin',
- kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- )
- self.assertResourceCalled('Directory', '/etc/hive/conf.server',
- owner = 'hive',
- group = 'hadoop',
- recursive = True,
- )
- self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
- group = 'hadoop',
- conf_dir = '/etc/hive/conf.server',
- mode = 0644,
- configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
- owner = 'hive',
- configurations = self.getConfig()['configurations']['mapred-site'],
- )
- self.assertResourceCalled('XmlConfig', 'hive-site.xml',
- group = 'hadoop',
- conf_dir = '/etc/hive/conf.server',
- mode = 0644,
- configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
- owner = 'hive',
- configurations = self.getConfig()['configurations']['hive-site'],
- )
- self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
- content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
- owner = 'hive',
- group = 'hadoop',
- )
- self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
- owner = 'hive',
- group = 'hadoop',
- )
- self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
- owner = 'hive',
- group = 'hadoop',
- )
- self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
- content = 'log4jproperties\nline2',
- owner = 'hive',
- group = 'hadoop',
- mode = 0644,
- )
- self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
- content = 'log4jproperties\nline2',
- owner = 'hive',
- group = 'hadoop',
- mode = 0644,
- )
- self.assertResourceCalled('Directory', '/etc/hive/conf',
- owner = 'hive',
- group = 'hadoop',
- recursive = True,
- )
- self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
- group = 'hadoop',
- conf_dir = '/etc/hive/conf',
- mode = 0644,
- configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
- owner = 'hive',
- configurations = self.getConfig()['configurations']['mapred-site'],
- )
- self.assertResourceCalled('XmlConfig', 'hive-site.xml',
- group = 'hadoop',
- conf_dir = '/etc/hive/conf',
- mode = 0644,
- configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
- owner = 'hive',
- configurations = self.getConfig()['configurations']['hive-site'],
- )
- self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
- content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
- owner = 'hive',
- group = 'hadoop',
- )
- self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
- owner = 'hive',
- group = 'hadoop',
- )
- self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
- owner = 'hive',
- group = 'hadoop',
- )
- self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
- content = 'log4jproperties\nline2',
- owner = 'hive',
- group = 'hadoop',
- mode = 0644,
- )
- self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
- content = 'log4jproperties\nline2',
- owner = 'hive',
- group = 'hadoop',
- mode = 0644,
- )
- self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
- creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
- path = ['/bin', '/usr/bin/'],
- environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
- not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
- )
- self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
- environment = {'no_proxy': 'c6401.ambari.apache.org'},
- not_if = '[ -f DBConnectionVerification.jar]',
- )
- self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
- content = Template('startHiveserver2.sh.j2'),
- mode = 0755,
- )
- self.assertResourceCalled('Directory', '/var/run/hive',
- owner = 'hive',
- group = 'hadoop',
- mode = 0755,
- recursive = True,
- )
- self.assertResourceCalled('Directory', '/var/log/hive',
- owner = 'hive',
- group = 'hadoop',
- mode = 0755,
- recursive = True,
- )
- self.assertResourceCalled('Directory', '/var/lib/hive',
- owner = 'hive',
- group = 'hadoop',
- mode = 0755,
- recursive = True,
- )
-
- @patch("hive_service.check_fs_root")
- @patch("time.time")
- @patch("socket.socket")
- def test_socket_timeout(self, socket_mock, time_mock, check_fs_root_mock):
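- """If the HiveServer2 port never accepts a connection the start command must fail: socket.connect always raises socket.error and the mocked clock jumps past the timeout."""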
- s = socket_mock.return_value
- s.connect = MagicMock()
- s.connect.side_effect = socket.error("")
-
- time_mock.side_effect = [0, 1000, 2000, 3000, 4000]
-
- try:
- self.executeScript("2.0.6/services/HIVE/package/scripts/hive_server.py",
- classname = "HiveServer",
- command = "start",
- config_file="default.json"
- )
-
- self.fail("Script failure due to socket error was expected")
- except:
- self.assert_configure_default()
- self.assertTrue(socket_mock.called)
- self.assertTrue(s.close.called)
|