@@ -17,6 +17,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
+import json
 import os
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
@@ -46,7 +47,7 @@ class TestHiveMetastore(RMFTestCase):
 
     self.assert_configure_default()
     self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+        environment = {'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
         not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
         user = 'hive',
         path = ['/bin:/usr/lib/hive/bin:/usr/bin'],
@@ -106,7 +107,7 @@ class TestHiveMetastore(RMFTestCase):
         user = 'hive',
     )
     self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+        environment = {'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
         not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
         user = 'hive',
         path = ['/bin:/usr/lib/hive/bin:/usr/bin'],
@@ -348,3 +349,148 @@ class TestHiveMetastore(RMFTestCase):
         recursive = True,
         cd_access = 'a',
     )
+
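+  # Metastore start for a versioned (post-upgrade) HDP 2.3 install: the command
+  # carries a target version, so configuration, schematool and the start script
+  # are expected to use /usr/hdp paths instead of /etc/hive/conf.server and /usr/lib/hive.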
+  @patch("resource_management.libraries.functions.get_hdp_version")
+  @patch("resource_management.core.shell.call")
+  def test_start_ru(self, call_mock, get_hdp_version_mock):
+    get_hdp_version_mock.return_value = '2.3.0.0-1234'
+
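+    # Load the stock default.json and patch it to look like a command for a
+    # versioned HDP 2.3.0.0-1234 HIVE_SERVER install with a MySQL metastore password.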
+    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
+    with open(config_file, "r") as f:
+      json_content = json.load(f)
+
+    version = "2.3.0.0-1234"
+    json_content['commandParams']['version'] = version
+    json_content['hostLevelParams']['stack_name'] = "HDP"
+    json_content['hostLevelParams']['stack_version'] = "2.3"
+    json_content['role'] = "HIVE_SERVER"
+    json_content['configurations']['hive-site']['javax.jdo.option.ConnectionPassword'] = "aaa"
+
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_metastore.py",
+                       classname = "HiveMetastore",
+                       command = "start",
+                       command_args = [True],
+                       config_dict = json_content,
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES)
+
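+    # Hive configuration is expected under the versioned
+    # /usr/hdp/current/hive-server2/conf (and conf/conf.server) directories.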
+    self.assertResourceCalled('Directory', '/etc/hive',
+        mode = 0755)
+
+    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-server2/conf',
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True)
+
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+        group = 'hadoop',
+        conf_dir = '/usr/hdp/current/hive-server2/conf',
+        mode = 0644,
+        configuration_attributes = {u'final': {u'mapred.healthChecker.script.path': u'true',
+                                               u'mapreduce.jobtracker.staging.root.dir': u'true'}},
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'])
+
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop')
+
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-exec-log4j.properties',
+        content = 'log4jproperties\nline2',
+        mode = 420,
+        group = 'hadoop',
+        owner = 'hive')
+
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-log4j.properties',
+        content = 'log4jproperties\nline2',
+        mode = 420,
+        group = 'hadoop',
+        owner = 'hive')
+
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+        group = 'hadoop',
+        conf_dir = '/usr/hdp/current/hive-server2/conf/conf.server',
+        mode = 0644,
+        configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
+                                               u'javax.jdo.option.ConnectionDriverName': u'true',
+                                               u'javax.jdo.option.ConnectionPassword': u'true'}},
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'])
+
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hive-env.sh',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop')
+
+    self.assertResourceCalled('Directory', '/etc/security/limits.d',
+        owner = 'root',
+        group = 'root',
+        recursive = True)
+
+    self.assertResourceCalled('File', '/etc/security/limits.d/hive.conf',
+        content = Template('hive.conf.j2'),
+        owner = 'root',
+        group = 'root',
+        mode = 0644)
+
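+    # The MySQL JDBC driver is copied (via sudo) into the versioned Hive lib
+    # directory so schematool and the metastore can reach the backing database.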
+    self.assertResourceCalled('Execute', ('cp',
+        '--remove-destination',
+        '/usr/share/java/mysql-connector-java.jar',
+        '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
+        path = ['/bin', '/usr/bin/'],
+        sudo = True)
+
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
+        mode = 0644)
+
+    self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
+        content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'))
+
+    self.assertResourceCalled('File', '/tmp/start_metastore_script',
+        content = StaticFile('startMetastore.sh'),
+        mode = 0755)
+
+    self.maxDiff = None
+
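+    # The metastore schema is expected to be initialized with schematool
+    # (MySQL backend) before the daemon is started.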
+    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord aaa',
+        not_if = "ambari-sudo.sh su hive -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord aaa'",
+        user = 'hive')
+
+    self.assertResourceCalled('Directory', '/var/run/hive',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
+        cd_access = 'a')
+
+    self.assertResourceCalled('Directory', '/var/log/hive',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
+        cd_access = 'a')
+
+    self.assertResourceCalled('Directory', '/var/lib/hive',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
+        cd_access = 'a')
+
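+    # The start script itself runs against the versioned HDP binaries:
+    # HADOOP_HOME, HIVE_BIN and PATH all resolve under /usr/hdp.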
+    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': '/usr/hdp/2.3.0.0-1234/hadoop', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': '/usr/hdp/current/hive-server2/bin/hive'},
+        not_if = None,
+        user = 'hive',
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'])
+
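+    # Finally, JDBC connectivity to the metastore database is verified.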
+    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive aaa com.mysql.jdbc.Driver',
+        path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+        tries = 5,
+        try_sleep = 10)
+
+    self.assertNoMoreResources()