#!/usr/bin/env python2.6
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import logging
import unittest

from mock.mock import MagicMock, patch

from HttpClientInvoker import HttpClientInvoker
from ambari_client.ambari_api import AmbariClient
from ambari_client.model.stack import StackConfigModel, StackComponentModel
  23. class TestAmbariClient(unittest.TestCase):
  24. def setUp(self):
  25. http_client_logger = logging.getLogger()
  26. http_client_logger.info('Running test:' + self.id())
  27. def create_client(self, http_client_mock = MagicMock()):
  28. http_client_mock.invoke.side_effect = HttpClientInvoker.http_client_invoke_side_effects
  29. client = AmbariClient("localhost", 8080, "admin", "admin", version=1, client=http_client_mock)
  30. return client
  31. def test_init(self):
  32. """
  33. AmbariClient is the top-level root resources.
  34. This testcase checks if when the init method was called &
  35. the httpclient was initialized
  36. """
  37. client = AmbariClient("localhost", 8080, "admin", "admin", version=1)
  38. self.assertEqual(client.version, 1, "version should be 1")
  39. self.assertEqual(client.host_url, "http://localhost:8080/api/v1",
  40. "host_url should be http://localhost:8080/api/v1")
  41. client = AmbariClient(host_name="localhost", user_name="admin", password="admin")
  42. self.assertEqual(client.version, 1, "version should be 1")
  43. self.assertEqual(client.host_url, "http://localhost:8080/api/v1",
  44. "host_url should be http://localhost:8080/api/v1")
  45. client = AmbariClient(host_name="localhost")
  46. self.assertEqual(client.version, 1, "version should be 1")
  47. self.assertEqual(client.host_url, "http://localhost:8080/api/v1",
  48. "host_url should be http://localhost:8080/api/v1")
  49. client = AmbariClient("localhost", 8443, "admin", "admin", use_https=True)
  50. self.assertEqual(client.version, 1, "version should be 1")
  51. self.assertEqual(client.host_url, "https://localhost:8443/api/v1",
  52. "host_url should be https://localhost:8443/api/v1")
  53. def test_get_all_clusters(self):
  54. """
  55. Get all clusters.
  56. This testcase checks if get_all_clusters returns a list of ModelList.
  57. """
  58. expected_output = {'items': [{'cluster_name': u'test1', 'version': u'HDP-1.2.1'}]}
  59. client = self.create_client()
  60. all_clusters = client.get_all_clusters()
  61. self.assertEqual(len(all_clusters), 1)
  62. self.assertEqual(all_clusters.to_json_dict(), expected_output)
  63. def test_get_cluster(self):
  64. """
  65. Get all clusters.
  66. This testcase checks if get_all_clusters returns a list of ModelList.
  67. """
  68. expected_dict_output = {'cluster_name': u'test1', 'version': u'HDP-1.2.1'}
  69. client = self.create_client()
  70. cluster = client.get_cluster('test1')
  71. self.assertEqual(cluster.cluster_name, "test1", "cluster_name should be test1 ")
  72. self.assertEqual(cluster.to_json_dict(), expected_dict_output, "to_json_dict should convert ClusterModel")
  73. def test_get_host(self):
  74. """
  75. Get host
  76. This testcase checks if client.get_host returns a correct host
  77. """
  78. expected_dict_output = {'ip': '10.0.2.15', 'host_name': 'dev06.hortonworks.com', 'rack_info': '/default-rack'}
  79. client = self.create_client()
  80. host = client.get_host('dev06.hortonworks.com')
  81. self.assertEqual(host.to_json_dict(), expected_dict_output)
  82. self.assertEqual(host.host_state, "HEARTBEAT_LOST")
  83. def test_get_all_hosts(self):
  84. """
  85. Get all hosts.
  86. This testcase checks if get_all_hosts returns a list of ModelList.
  87. """
  88. expected_hosts_dict = {'items': [{'ip': None, 'host_name': u'apspal44-83', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'apspal44-84', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'apspal44-85', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'apspal44-86', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'apspal44-87', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'apspal44-88', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'apspal44-89', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'r01hn01', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'r01mgt', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'r01wn01', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'r01wn02', 'rack_info': '/default-rack'}, {'ip': None, 'host_name': u'r01wn03', 'rack_info': '/default-rack'}]}
  89. client = self.create_client()
  90. all_hosts = client.get_all_hosts()
  91. self.assertEqual(len(all_hosts), 12, "There should be 12 hosts from the response")
  92. self.assertEqual(all_hosts.to_json_dict(), expected_hosts_dict)
  93. def test_bootstrap_hosts(self):
  94. """
  95. Test Bootstrap
  96. """
  97. http_client_mock = MagicMock()
  98. ssh_key = 'abc!@#$%^&*()_:"|<>?[];\'\\./'
  99. host_list = ['dev05.hortonworks.com','dev06.hortonworks.com']
  100. expected_path = '//bootstrap'
  101. expected_headers = {'Content-Type': 'application/json'}
  102. expected_request = {'hosts': host_list, 'sshKey': 'abc!@#$%^&*()_:"|<>?[];\\\'\\\\./'}
  103. expected_response = {'status': 201, 'message': u'Running Bootstrap now.', 'requestId': 5}
  104. client = self.create_client(http_client_mock)
  105. resp = client.bootstrap_hosts(host_list, ssh_key)
  106. self.assertEqual(resp.to_json_dict(), expected_response)
  107. http_client_mock.invoke.assert_called_with('POST', expected_path, headers=expected_headers, payload=expected_request)
  108. def test_create_cluster(self):
  109. """
  110. Test create cluster
  111. """
  112. http_client_mock = MagicMock()
  113. expected_path = '//clusters/c1'
  114. expected_request = {'Clusters': {'version': 'HDP-2.0.5'}}
  115. client = self.create_client(http_client_mock)
  116. resp = client.create_cluster('c1', 'HDP-2.0.5')
  117. http_client_mock.invoke.assert_called_with('POST', expected_path, headers=None, payload=expected_request)
  118. def test_delete_cluster(self):
  119. """
  120. Test create cluster
  121. """
  122. http_client_mock = MagicMock()
  123. expected_path = '//clusters/c1'
  124. expected_request = None
  125. client = self.create_client(http_client_mock)
  126. resp = client.delete_cluster('c1')
  127. http_client_mock.invoke.assert_called_with('DELETE', expected_path, headers=None, payload=expected_request)
  128. def test_delete_host(self):
  129. """
  130. Test delete host
  131. """
  132. http_client_mock = MagicMock()
  133. expected_path = '//hosts/abc.abc.abc'
  134. expected_request = None
  135. client = self.create_client(http_client_mock)
  136. resp = client.delete_host('abc.abc.abc')
  137. http_client_mock.invoke.assert_called_with('DELETE', expected_path, headers=None, payload=expected_request)
  138. def test_get_config(self):
  139. """
  140. Test get config
  141. """
  142. expected_dict = {'items': [{'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'datanode_du_reserved', 'property_value': u'1'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.access.time.precision', 'property_value': u'0'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.balance.bandwidthPerSec', 'property_value': u'6250000'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.block.access.token.enable', 'property_value': u'true'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.block.size', 'property_value': u'134217728'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.blockreport.initialDelay', 'property_value': u'120'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.cluster.administrators', 'property_value': u' hdfs'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.datanode.du.pct', 'property_value': u'0.85f'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.datanode.failed.volumes.tolerated', 'property_value': u'0'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.datanode.ipc.address', 'property_value': u'0.0.0.0:8010'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.datanode.max.xcievers', 'property_value': u'4096'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.datanode.socket.write.timeout', 'property_value': u'0'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.heartbeat.interval', 'property_value': u'3'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.https.port', 'property_value': u'50470'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.namenode.avoid.read.stale.datanode', 'property_value': u'true'}, {'stack_version': u'1.3.0', 
'service_name': u'HDFS', 'property_name': u'dfs.namenode.avoid.write.stale.datanode', 'property_value': u'true'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.namenode.handler.count', 'property_value': u'100'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.namenode.handler.count', 'property_value': u'40'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.namenode.stale.datanode.interval', 'property_value': u'30000'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.namenode.write.stale.datanode.ratio', 'property_value': u'1.0f'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.permissions', 'property_value': u'true'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.permissions.supergroup', 'property_value': u'hdfs'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.replication.max', 'property_value': u'50'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.safemode.threshold.pct', 'property_value': u'1.0f'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.secondary.https.port', 'property_value': u'50490'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.umaskmode', 'property_value': u'077'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs.web.ugi', 'property_value': u'gopher,gopher'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_block_local_path_access_user', 'property_value': u'hbase'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_data_dir', 'property_value': u'/hadoop/hdfs/data'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_datanode_address', 'property_value': u'50010'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': 
u'dfs_datanode_data_dir_perm', 'property_value': u'750'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_datanode_failed_volume_tolerated', 'property_value': u'0'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_datanode_http_address', 'property_value': u'50075'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_name_dir', 'property_value': u'/hadoop/hdfs/namenode'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_replication', 'property_value': u'3'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dfs_webhdfs_enabled', 'property_value': u'true'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'dtnode_heapsize', 'property_value': u'1024'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'fs.checkpoint.edits.dir', 'property_value': u'${fs.checkpoint.dir}'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'fs.checkpoint.period', 'property_value': u'21600'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'fs.checkpoint.size', 'property_value': u'536870912'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'fs.trash.interval', 'property_value': u'360'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'fs_checkpoint_dir', 'property_value': u'/hadoop/hdfs/namesecondary'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'fs_checkpoint_period', 'property_value': u'21600'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'fs_checkpoint_size', 'property_value': u'0.5'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'hadoop.security.authentication', 'property_value': u'simple'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'hadoop_heapsize', 'property_value': u'1024'}, {'stack_version': u'1.3.0', 
'service_name': u'HDFS', 'property_name': u'hadoop_pid_dir_prefix', 'property_value': u'/var/run/hadoop'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'hdfs_log_dir_prefix', 'property_value': u'/var/log/hadoop'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'hdfs_user', 'property_value': u'hdfs'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'io.compression.codec.lzo.class', 'property_value': u'com.hadoop.compression.lzo.LzoCodec'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'io.compression.codecs', 'property_value': u'org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,com.hadoop.compression.lzo.LzoCodec,com.hadoop.compression.lzo.LzopCodec,org.apache.hadoop.io.compress.SnappyCodec'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'io.file.buffer.size', 'property_value': u'131072'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'io.serializations', 'property_value': u'org.apache.hadoop.io.serializer.WritableSerialization'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'ipc.client.connect.max.retries', 'property_value': u'50'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'ipc.client.connection.maxidletime', 'property_value': u'30000'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'ipc.client.idlethreshold', 'property_value': u'8000'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'ipc.server.max.response.size', 'property_value': u'5242880'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'ipc.server.read.threadpool.size', 'property_value': u'5'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'kerberos_domain', 'property_value': u'EXAMPLE.COM'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'keytab_path', 
'property_value': u'/etc/security/keytabs'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'namenode_formatted_mark_dir', 'property_value': u'/var/run/hadoop/hdfs/namenode/formatted/'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'namenode_heapsize', 'property_value': u'1024'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'namenode_opt_maxnewsize', 'property_value': u'640'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'namenode_opt_newsize', 'property_value': u'200'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'proxyuser_group', 'property_value': u'users'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.client.datanode.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.client.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.datanode.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.inter.datanode.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.inter.tracker.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.job.submission.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.namenode.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security.task.umbilical.protocol.acl', 'property_value': u'*'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'security_enabled', 'property_value': u'false'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'property_name': u'webinterface.private.actions', 
'property_value': u'false'}]}
  143. expected_first_item = StackConfigModel(None, property_name='datanode_du_reserved' , property_value='1' , service_name='HDFS' , stack_version='1.3.0')
  144. expected_request = None
  145. client = self.create_client()
  146. configs = client.get_config('1.3.0','HDFS')
  147. self.assertEquals(len(configs), 75)
  148. self.assertEquals(str(configs[0]),str(expected_first_item))
  149. self.assertEquals(configs.to_json_dict(), expected_dict)
  150. def test_get_components(self):
  151. """
  152. Test get components
  153. """
  154. expected_dict = {'items': [{'stack_version': u'1.3.0', 'service_name': u'HDFS', 'component_name': u'DATANODE'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'component_name': u'HDFS_CLIENT'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'component_name': u'NAMENODE'}, {'stack_version': u'1.3.0', 'service_name': u'HDFS', 'component_name': u'SECONDARY_NAMENODE'}]}
  155. expected_first_item = StackComponentModel(None, component_name='DATANODE', service_name='HDFS' , stack_version='1.3.0')
  156. expected_request = None
  157. client = self.create_client()
  158. components = client.get_components('1.3.0','HDFS')
  159. self.assertEquals(len(components), 4)
  160. self.assertEquals(str(components[0]),str(expected_first_item))
  161. self.assertEquals(components.to_json_dict(), expected_dict)