#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys

from resource_management.core.logger import Logger
from resource_management.core.exceptions import Fail
from resource_management.core.resources.system import Execute
from resource_management.core.shell import call
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.get_hdp_version import get_hdp_version
from resource_management.libraries.script.script import Script
# hdp-select set oozie-server 2.2.0.0-1234
TEMPLATE = ('hdp-select', 'set')

# a mapping of Ambari server role to hdp-select component name for all
# non-clients
SERVER_ROLE_DIRECTORY_MAP = {
  'ACCUMULO_MASTER' : 'accumulo-master',
  'ACCUMULO_MONITOR' : 'accumulo-monitor',
  'ACCUMULO_GC' : 'accumulo-gc',
  'ACCUMULO_TRACER' : 'accumulo-tracer',
  'ACCUMULO_TSERVER' : 'accumulo-tablet',
  'ATLAS_SERVER' : 'atlas-server',
  'FLUME_HANDLER' : 'flume-server',
  'FALCON_SERVER' : 'falcon-server',
  'NAMENODE' : 'hadoop-hdfs-namenode',
  'DATANODE' : 'hadoop-hdfs-datanode',
  'SECONDARY_NAMENODE' : 'hadoop-hdfs-secondarynamenode',
  'NFS_GATEWAY' : 'hadoop-hdfs-nfs3',
  'JOURNALNODE' : 'hadoop-hdfs-journalnode',
  'HBASE_MASTER' : 'hbase-master',
  'HBASE_REGIONSERVER' : 'hbase-regionserver',
  'HIVE_METASTORE' : 'hive-metastore',
  'HIVE_SERVER' : 'hive-server2',
  'WEBHCAT_SERVER' : 'hive-webhcat',
  'KAFKA_BROKER' : 'kafka-broker',
  'KNOX_GATEWAY' : 'knox-server',
  'OOZIE_SERVER' : 'oozie-server',
  'RANGER_ADMIN' : 'ranger-admin',
  'RANGER_USERSYNC' : 'ranger-usersync',
  'SPARK_JOBHISTORYSERVER' : 'spark-historyserver',
  'NIMBUS' : 'storm-nimbus',
  'SUPERVISOR' : 'storm-supervisor',
  'HISTORYSERVER' : 'hadoop-mapreduce-historyserver',
  'APP_TIMELINE_SERVER' : 'hadoop-yarn-timelineserver',
  'NODEMANAGER' : 'hadoop-yarn-nodemanager',
  'RESOURCEMANAGER' : 'hadoop-yarn-resourcemanager',
  'ZOOKEEPER_SERVER' : 'zookeeper-server'
}
# mapping of service check to hdp-select component
SERVICE_CHECK_DIRECTORY_MAP = {
  "HDFS_SERVICE_CHECK" : "hadoop-client",
  "TEZ_SERVICE_CHECK" : "hadoop-client",
  "PIG_SERVICE_CHECK" : "hadoop-client",
  "HIVE_SERVICE_CHECK" : "hadoop-client",
  "OOZIE_SERVICE_CHECK" : "hadoop-client",
  "MAHOUT_SERVICE_CHECK" : "mahout-client"
}
# /usr/hdp/current/hadoop-client/[bin|sbin|libexec|lib]
# /usr/hdp/2.3.0.0-1234/hadoop/[bin|sbin|libexec|lib]
HADOOP_DIR_TEMPLATE = "/usr/hdp/{0}/{1}/{2}"

# /usr/hdp/current/hadoop-client
# /usr/hdp/2.3.0.0-1234/hadoop
HADOOP_HOME_DIR_TEMPLATE = "/usr/hdp/{0}/{1}"

HADOOP_DIR_DEFAULTS = {
  "home": "/usr/lib/hadoop",
  "libexec": "/usr/lib/hadoop/libexec",
  "sbin": "/usr/lib/hadoop/sbin",
  "bin": "/usr/bin",
  "lib": "/usr/lib/hadoop/lib"
}
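
# Illustrative example (the concrete version and component values below are
# hypothetical, matching the sample paths in the comments above):
#   HADOOP_DIR_TEMPLATE.format("current", "hadoop-client", "bin")
#     -> "/usr/hdp/current/hadoop-client/bin"
#   HADOOP_HOME_DIR_TEMPLATE.format("2.3.0.0-1234", "hadoop")
#     -> "/usr/hdp/2.3.0.0-1234/hadoop"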

def select(component, version):
  """
  Executes hdp-select on the specified component and version. Some global
  variables that are imported via params/status_params/params_linux will need
  to be recalculated after the hdp-select. However, Python does not re-import
  existing modules. The only way to ensure that the configuration variables are
  recalculated is to call reload(...) on each module that has global parameters.
  After invoking hdp-select, this function will also reload params,
  status_params, and params_linux.
  :param component: the hdp-select component, such as oozie-server
  :param version: the version to set the component to, such as 2.2.0.0-1234
  """
  command = TEMPLATE + (component, version)
  Execute(command, sudo=True)

  # don't trust the ordering of modules:
  # 1) status_params
  # 2) params_linux
  # 3) params
  modules = sys.modules
  param_modules = ("status_params", "params_linux", "params")
  for moduleName in param_modules:
    if moduleName in modules:
      module = modules.get(moduleName)
      reload(module)
      Logger.info("After hdp-select {0}, reloaded module {1}".format(component, moduleName))

def get_role_component_current_hdp_version():
  """
  Gets the current HDP version of the component that this role command is for.
  :return: the current HDP version of the specified component or None
  """
  hdp_select_component = None
  role = default("/role", "")
  role_command = default("/roleCommand", "")

  if role in SERVER_ROLE_DIRECTORY_MAP:
    hdp_select_component = SERVER_ROLE_DIRECTORY_MAP[role]
  elif role_command == "SERVICE_CHECK" and role in SERVICE_CHECK_DIRECTORY_MAP:
    hdp_select_component = SERVICE_CHECK_DIRECTORY_MAP[role]

  if hdp_select_component is None:
    return None

  current_hdp_version = get_hdp_version(hdp_select_component)

  if current_hdp_version is None:
    Logger.warning("Unable to determine hdp-select version for {0}".format(
      hdp_select_component))
  else:
    Logger.info("{0} is currently at version {1}".format(
      hdp_select_component, current_hdp_version))

  return current_hdp_version
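
# Example (hypothetical command context): if the current command's /role is
# "NAMENODE", the role maps to the "hadoop-hdfs-namenode" component and this
# returns a version string such as "2.2.0.0-1234", or None when hdp-select
# cannot report one.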

def get_hadoop_dir(target, force_latest_on_upgrade=False, upgrade_stack_only=False):
  """
  Return the hadoop shared directory in the following override order:
  1. Use the default for HDP 2.1 and lower.
  2. If HDP 2.2 or higher, use /usr/hdp/current/hadoop-client/{target}.
  3. If HDP 2.2 or higher AND an upgrade is in progress, use
     /usr/hdp/<version>/hadoop/{target}. However, if the upgrade has not yet
     invoked hdp-select, use the current version of the component instead.
  :param target: the target directory
  :param force_latest_on_upgrade: if True, return the "current" directory
    without the HDP version built into the path, such as
    /usr/hdp/current/hadoop-client
  :param upgrade_stack_only: if True, use the upgrade stack's target version
    even when the component still reports a different (current) version
  """
  if target not in HADOOP_DIR_DEFAULTS:
    raise Fail("Target {0} not defined".format(target))

  hadoop_dir = HADOOP_DIR_DEFAULTS[target]

  if Script.is_hdp_stack_greater_or_equal("2.2"):
    # home uses a different template
    if target == "home":
      hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format("current", "hadoop-client")
    else:
      hadoop_dir = HADOOP_DIR_TEMPLATE.format("current", "hadoop-client", target)

    # if we are not forcing "current" for HDP 2.2, then attempt to determine
    # if the exact version needs to be returned in the directory
    if not force_latest_on_upgrade:
      stack_info = _get_upgrade_stack()

      if stack_info is not None:
        stack_version = stack_info[1]

        # determine if hdp-select has been run and if not, then use the current
        # hdp version until this component is upgraded
        current_hdp_version = get_role_component_current_hdp_version()
        if current_hdp_version is not None and stack_version != current_hdp_version and not upgrade_stack_only:
          stack_version = current_hdp_version

        if target == "home":
          # home uses a different template
          hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_version, "hadoop")
        else:
          hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_version, "hadoop", target)

  return hadoop_dir
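
# Example (hypothetical values): on HDP 2.2+ outside of an upgrade,
#   get_hadoop_dir("bin")  -> "/usr/hdp/current/hadoop-client/bin"
# while during an upgrade to 2.3.0.0-1234 whose hdp-select step has already
# run for this component,
#   get_hadoop_dir("bin")  -> "/usr/hdp/2.3.0.0-1234/hadoop/bin"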

def _get_upgrade_stack():
  """
  Gets the stack name and stack version if an upgrade is currently in progress.
  :return: the stack name and stack version as a tuple, or None if an
  upgrade is not in progress.
  """
  direction = default("/commandParams/upgrade_direction", None)
  stack_name = default("/hostLevelParams/stack_name", None)
  stack_version = default("/commandParams/version", None)

  if direction and stack_name and stack_version:
    return (stack_name, stack_version)

  return None
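
# Example (hypothetical command JSON): with upgrade_direction set, stack_name
# "HDP" and version "2.3.0.0-1234", this returns ("HDP", "2.3.0.0-1234");
# otherwise it returns None.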

def get_hdp_versions():
  """
  Runs "hdp-select versions" and collects the installed stack versions.
  :return: a list of installed HDP version strings, or an empty list if the
  command fails.
  """
  code, out = call("hdp-select versions")

  if 0 == code:
    versions = []
    for line in out.splitlines():
      versions.append(line.rstrip('\n'))
    return versions
  else:
    return []
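
# Example (hypothetical output): if "hdp-select versions" prints
#   2.2.0.0-1234
#   2.3.0.0-2557
# then get_hdp_versions() returns ['2.2.0.0-1234', '2.3.0.0-2557'].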