#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
# Test harness and mocking utilities; resource_management is imported as a
# module so its functions can be patched at the module level below.
from stacks.utils.RMFTestCase import *
import resource_management.libraries.functions
from mock.mock import MagicMock, call, patch
# get_unique_id_and_date is patched to return '' so that any generated
# file names used by the service-check script are deterministic.
@patch.object(resource_management.libraries.functions, "get_unique_id_and_date", new=MagicMock(return_value=''))
class TestServiceCheck(RMFTestCase):
    """Unit tests for the HDFS service-check script (service_check.py).

    Each test replays the script through RMFTestCase.executeScript and then
    verifies the exact sequence of ExecuteHadoop resources it creates.
    """
    COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
    STACK_VERSION = "2.0.6"

    def test_service_check_default(self):
        """Service check against the non-secured (default) cluster config."""
        self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
                           classname="HdfsServiceCheck",
                           command="service_check",
                           config_file="default.json",
                           hdp_stack_version=self.STACK_VERSION,
                           target=RMFTestCase.TARGET_COMMON_SERVICES
                           )
        self.assert_service_check()
        self.assertNoMoreResources()

    def test_service_check_secured(self):
        """Service check for the secured case.

        NOTE(review): this test replays the same "default.json" config as
        test_service_check_default, so it currently duplicates that test.
        It presumably should use a secured config (e.g. "secured.json")
        together with the matching kinit assertion — confirm against the
        available test configs before changing.
        """
        self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
                           classname="HdfsServiceCheck",
                           command="service_check",
                           config_file="default.json",
                           hdp_stack_version=self.STACK_VERSION,
                           target=RMFTestCase.TARGET_COMMON_SERVICES
                           )
        self.assert_service_check()
        self.assertNoMoreResources()

    def assert_service_check(self):
        """Assert the exact ExecuteHadoop resource sequence the script emits.

        Order matters: safemode check, /tmp setup (mkdir + chmod), then the
        round-trip of a test file (put + existence check).
        """
        # Wait (up to 20 tries) for the NameNode to leave safemode.
        self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -safemode get | grep OFF',
                                  logoutput=True,
                                  tries=20,
                                  conf_dir='/etc/hadoop/conf',
                                  try_sleep=3,
                                  bin_dir='/usr/bin',
                                  user='hdfs',
                                  )
        # Create /tmp only if it does not already exist (guarded by not_if).
        self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp',
                                  conf_dir='/etc/hadoop/conf',
                                  logoutput=True,
                                  not_if="ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]/usr/bin/hadoop --config /etc/hadoop/conf fs -test -e /tmp'",
                                  try_sleep=3,
                                  tries=5,
                                  bin_dir='/usr/bin',
                                  user='hdfs',
                                  )
        self.assertResourceCalled('ExecuteHadoop', 'fs -chmod 777 /tmp',
                                  conf_dir='/etc/hadoop/conf',
                                  logoutput=True,
                                  try_sleep=3,
                                  tries=5,
                                  bin_dir='/usr/bin',
                                  user='hdfs',
                                  )
        # Remove any stale test file, then upload a fresh one.
        self.assertResourceCalled('ExecuteHadoop', 'fs -rm /tmp/; hadoop --config /etc/hadoop/conf fs -put /etc/passwd /tmp/',
                                  logoutput=True,
                                  tries=5,
                                  conf_dir='/etc/hadoop/conf',
                                  bin_dir='/usr/bin',
                                  try_sleep=3,
                                  user='hdfs',
                                  )
        # Verify the uploaded file is visible in HDFS.
        self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /tmp/',
                                  logoutput=True,
                                  tries=5,
                                  conf_dir='/etc/hadoop/conf',
                                  bin_dir='/usr/bin',
                                  try_sleep=3,
                                  user='hdfs',
                                  )
|