  1. """
  2. Licensed to the Apache Software Foundation (ASF) under one
  3. or more contributor license agreements. See the NOTICE file
  4. distributed with this work for additional information
  5. regarding copyright ownership. The ASF licenses this file
  6. to you under the Apache License, Version 2.0 (the
  7. "License"); you may not use this file except in compliance
  8. with the License. You may obtain a copy of the License at
  9. http://www.apache.org/licenses/LICENSE-2.0
  10. Unless required by applicable law or agreed to in writing, software
  11. distributed under the License is distributed on an "AS IS" BASIS,
  12. WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. See the License for the specific language governing permissions and
  14. limitations under the License.
  15. Ambari Agent
  16. """

from resource_management.libraries.script.script import Script
from resource_management.core.resources.system import Execute, Directory, File, Link
from resource_management.core.resources import Package
from resource_management.core.source import Template
from resource_management.core.resources.service import ServiceConfig
from resource_management.libraries.resources.xml_config import XmlConfig
from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
from resource_management.core.exceptions import Fail
from resource_management.core.logger import Logger
from resource_management.libraries.functions.format import format
import os
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
from ambari_commons import OSConst
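

# Lays down the HDFS configuration for Linux-family hosts: ulimit settings,
# JAAS files when Kerberos is enabled, the hadoop-policy/ssl-client/ssl-server/
# hdfs-site/core-site XML configs, the slaves file, and optional snappy
# symlinks and LZO packages.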
@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def hdfs(name=None):
  import params

  if params.create_lib_snappy_symlinks:
    install_snappy()

  # On some operating systems this directory may not exist, so create it before placing files in it.
  Directory(params.limits_conf_dir,
            create_parents=True,
            owner='root',
            group='root'
  )

  File(os.path.join(params.limits_conf_dir, 'hdfs.conf'),
       owner='root',
       group='root',
       mode=0644,
       content=Template("hdfs.conf.j2")
  )
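
  # With Kerberos enabled, write JAAS configs for the DataNode, NameNode and,
  # in an HA cluster, the JournalNode; the slaves file is then owned by root.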
  if params.security_enabled:
    File(os.path.join(params.hadoop_conf_dir, 'hdfs_dn_jaas.conf'),
         owner=params.hdfs_user,
         group=params.user_group,
         content=Template("hdfs_dn_jaas.conf.j2")
    )
    File(os.path.join(params.hadoop_conf_dir, 'hdfs_nn_jaas.conf'),
         owner=params.hdfs_user,
         group=params.user_group,
         content=Template("hdfs_nn_jaas.conf.j2")
    )
    if params.dfs_ha_enabled:
      File(os.path.join(params.hadoop_conf_dir, 'hdfs_jn_jaas.conf'),
           owner=params.hdfs_user,
           group=params.user_group,
           content=Template("hdfs_jn_jaas.conf.j2")
      )

    tc_mode = 0644
    tc_owner = "root"
  else:
    tc_mode = None
    tc_owner = params.hdfs_user
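
  # hadoop-policy, ssl-client and ssl-server are optional configuration types;
  # each is written out only if it is present in the cluster configuration.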
  69. if "hadoop-policy" in params.config['configurations']:
  70. XmlConfig("hadoop-policy.xml",
  71. conf_dir=params.hadoop_conf_dir,
  72. configurations=params.config['configurations']['hadoop-policy'],
  73. configuration_attributes=params.config['configuration_attributes']['hadoop-policy'],
  74. owner=params.hdfs_user,
  75. group=params.user_group
  76. )
  77. if "ssl-client" in params.config['configurations']:
  78. XmlConfig("ssl-client.xml",
  79. conf_dir=params.hadoop_conf_dir,
  80. configurations=params.config['configurations']['ssl-client'],
  81. configuration_attributes=params.config['configuration_attributes']['ssl-client'],
  82. owner=params.hdfs_user,
  83. group=params.user_group
  84. )
  85. Directory(params.hadoop_conf_secure_dir,
  86. create_parents = True,
  87. owner='root',
  88. group=params.user_group,
  89. cd_access='a',
  90. )
  91. XmlConfig("ssl-client.xml",
  92. conf_dir=params.hadoop_conf_secure_dir,
  93. configurations=params.config['configurations']['ssl-client'],
  94. configuration_attributes=params.config['configuration_attributes']['ssl-client'],
  95. owner=params.hdfs_user,
  96. group=params.user_group
  97. )
  98. if "ssl-server" in params.config['configurations']:
  99. XmlConfig("ssl-server.xml",
  100. conf_dir=params.hadoop_conf_dir,
  101. configurations=params.config['configurations']['ssl-server'],
  102. configuration_attributes=params.config['configuration_attributes']['ssl-server'],
  103. owner=params.hdfs_user,
  104. group=params.user_group
  105. )
  106. XmlConfig("hdfs-site.xml",
  107. conf_dir=params.hadoop_conf_dir,
  108. configurations=params.config['configurations']['hdfs-site'],
  109. configuration_attributes=params.config['configuration_attributes']['hdfs-site'],
  110. owner=params.hdfs_user,
  111. group=params.user_group
  112. )
  113. XmlConfig("core-site.xml",
  114. conf_dir=params.hadoop_conf_dir,
  115. configurations=params.config['configurations']['core-site'],
  116. configuration_attributes=params.config['configuration_attributes']['core-site'],
  117. owner=params.hdfs_user,
  118. group=params.user_group,
  119. mode=0644
  120. )
  121. File(os.path.join(params.hadoop_conf_dir, 'slaves'),
  122. owner=tc_owner,
  123. content=Template("slaves.j2")
  124. )
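
  # LZO support is optional; when enabled, install the stack's LZO packages,
  # retrying against the repository as configured for the agent.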
  if params.lzo_enabled:
    lzo_packages = get_lzo_packages(params.stack_version_unformatted)
    Package(lzo_packages,
            retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability,
            retry_count=params.agent_stack_retry_count)
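

# Creates the 32- and 64-bit native library target directories and symlinks the
# bundled snappy shared objects into them.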
def install_snappy():
  import params

  Directory([params.so_target_dir_x86, params.so_target_dir_x64],
            create_parents=True,
  )
  Link(params.so_target_x86,
       to=params.so_src_x86,
  )
  Link(params.so_target_x64,
       to=params.so_src_x64,
  )
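

# Parses the output of 'hdfs dfsadmin -reconfig ... status' line by line and
# records whether at least one property change was reported as successful.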
class ConfigStatusParser():
  def __init__(self):
    self.reconfig_successful = False

  def handle_new_line(self, line, is_stderr):
    if is_stderr:
      return

    if line.startswith('SUCCESS: Changed property'):
      self.reconfig_successful = True

    Logger.info('[reconfig] %s' % (line))
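

# Starts a live reconfiguration of the given component via
# 'hdfs dfsadmin -reconfig <component> <address> start', then queries its status
# and fails the task if no property change was reported as successful.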
@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def reconfig(componentName, componentAddress):
  import params

  if params.security_enabled:
    Execute(params.nn_kinit_cmd,
            user=params.hdfs_user
    )

  nn_reconfig_cmd = format('hdfs --config {hadoop_conf_dir} dfsadmin -reconfig {componentName} {componentAddress} start')
  Execute(nn_reconfig_cmd,
          user=params.hdfs_user,
          logoutput=True,
          path=params.hadoop_bin_dir
  )

  nn_reconfig_cmd = format('hdfs --config {hadoop_conf_dir} dfsadmin -reconfig {componentName} {componentAddress} status')
  config_status_parser = ConfigStatusParser()
  Execute(nn_reconfig_cmd,
          user=params.hdfs_user,
          logoutput=False,
          path=params.hadoop_bin_dir,
          on_new_line=config_status_parser.handle_new_line
  )

  if not config_status_parser.reconfig_successful:
    Logger.info('Reconfiguration failed')
    raise Fail('Reconfiguration failed!')

  Logger.info('Reconfiguration successfully completed.')
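
# Example of how reconfig() might be invoked (hypothetical host and port; the
# actual call sites live in the component scripts that import this module):
#
#   reconfig("namenode", "nn-host.example.com:8020")


# Windows variant of hdfs(): creates the NameNode data directories and the
# exclude/include host lists, switches the mapped Windows service to run as the
# HDFS user, and writes out hadoop-policy.xml and hdfs-site.xml.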
@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def hdfs(component=None):
  import params
  if component == "namenode":
    directories = params.dfs_name_dir.split(",")
    Directory(directories,
              owner=params.hdfs_user,
              mode="(OI)(CI)F",
              create_parents=True
    )
    File(params.exclude_file_path,
         content=Template("exclude_hosts_list.j2"),
         owner=params.hdfs_user,
         mode="f",
    )

    if params.hdfs_include_file:
      File(params.include_file_path,
           content=Template("include_hosts_list.j2"),
           owner=params.hdfs_user,
           mode="f",
      )

  if component in params.service_map:
    service_name = params.service_map[component]
    ServiceConfig(service_name,
                  action="change_user",
                  username=params.hdfs_user,
                  password=Script.get_password(params.hdfs_user))

  if "hadoop-policy" in params.config['configurations']:
    XmlConfig("hadoop-policy.xml",
              conf_dir=params.hadoop_conf_dir,
              configurations=params.config['configurations']['hadoop-policy'],
              owner=params.hdfs_user,
              mode="f",
              configuration_attributes=params.config['configuration_attributes']['hadoop-policy']
    )

  XmlConfig("hdfs-site.xml",
            conf_dir=params.hadoop_conf_dir,
            configurations=params.config['configurations']['hdfs-site'],
            owner=params.hdfs_user,
            mode="f",
            configuration_attributes=params.config['configuration_attributes']['hdfs-site']
  )