#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

__all__ = ["select", "create", "get_hadoop_conf_dir", "get_hadoop_dir"]

import os
import version
import hdp_select

from resource_management.core import shell
from resource_management.libraries.script.script import Script
from resource_management.core.logger import Logger
from resource_management.core.resources.system import Directory
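
# Maps each conf-select package name to the configuration directories it
# manages: "conf_dir" is the traditional /etc/<component>/conf location and
# "current_dir" is the versioned location under /usr/hdp/current. A package
# may manage more than one directory (see "hive-hcatalog" below).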
PACKAGE_DIRS = {
  "accumulo": [
    {
      "conf_dir": "/etc/accumulo/conf",
      "current_dir": "/usr/hdp/current/accumulo-client/conf"
    }
  ],
  "falcon": [
    {
      "conf_dir": "/etc/falcon/conf",
      "current_dir": "/usr/hdp/current/falcon-client/conf"
    }
  ],
  "hadoop": [
    {
      "conf_dir": "/etc/hadoop/conf",
      "current_dir": "/usr/hdp/current/hadoop-client/conf"
    }
  ],
  "hbase": [
    {
      "conf_dir": "/etc/hbase/conf",
      "current_dir": "/usr/hdp/current/hbase-client/conf"
    }
  ],
  "hive": [
    {
      "conf_dir": "/etc/hive/conf",
      "current_dir": "/usr/hdp/current/hive-client/conf"
    }
  ],
  "kafka": [
    {
      "conf_dir": "/etc/kafka/conf",
      "current_dir": "/usr/hdp/current/kafka-broker/conf"
    }
  ],
  "knox": [
    {
      "conf_dir": "/etc/knox/conf",
      "current_dir": "/usr/hdp/current/knox-server/conf"
    }
  ],
  "mahout": [
    {
      "conf_dir": "/etc/mahout/conf",
      "current_dir": "/usr/hdp/current/mahout-client/conf"
    }
  ],
  "oozie": [
    {
      "conf_dir": "/etc/oozie/conf",
      "current_dir": "/usr/hdp/current/oozie-client/conf"
    }
  ],
  "phoenix": [
    {
      "conf_dir": "/etc/phoenix/conf",
      "current_dir": "/usr/hdp/current/phoenix-client/conf"
    }
  ],
  "ranger-admin": [
    {
      "conf_dir": "/etc/ranger/admin/conf",
      "current_dir": "/usr/hdp/current/ranger-admin/conf"
    }
  ],
  "ranger-kms": [
    {
      "conf_dir": "/etc/ranger/kms/conf",
      "current_dir": "/usr/hdp/current/ranger-kms/conf"
    }
  ],
  "ranger-usersync": [
    {
      "conf_dir": "/etc/ranger/usersync/conf",
      "current_dir": "/usr/hdp/current/ranger-usersync/conf"
    }
  ],
  "slider": [
    {
      "conf_dir": "/etc/slider/conf",
      "current_dir": "/usr/hdp/current/slider-client/conf"
    }
  ],
  "spark": [
    {
      "conf_dir": "/etc/spark/conf",
      "current_dir": "/usr/hdp/current/spark-client/conf"
    }
  ],
  "sqoop": [
    {
      "conf_dir": "/etc/sqoop/conf",
      "current_dir": "/usr/hdp/current/sqoop-client/conf"
    }
  ],
  "storm": [
    {
      "conf_dir": "/etc/storm/conf",
      "current_dir": "/usr/hdp/current/storm-client/conf"
    }
  ],
  "tez": [
    {
      "conf_dir": "/etc/tez/conf",
      "current_dir": "/usr/hdp/current/tez-client/conf"
    }
  ],
  "zookeeper": [
    {
      "conf_dir": "/etc/zookeeper/conf",
      "current_dir": "/usr/hdp/current/zookeeper-client/conf"
    }
  ],
  "pig": [
    {
      "conf_dir": "/etc/pig/conf",
      "current_dir": "/usr/hdp/current/pig-client/conf"
    }
  ],
  "flume": [
    {
      "conf_dir": "/etc/flume/conf",
      "current_dir": "/usr/hdp/current/flume-server/conf"
    }
  ],
  "storm-slider-client": [
    {
      "conf_dir": "/etc/storm-slider-client/conf",
      "current_dir": "/usr/hdp/current/storm-slider-client/conf"
    }
  ],
  "hive-hcatalog": [
    {
      "conf_dir": "/etc/hive-webhcat/conf",
      "prefix": "/etc/hive-webhcat",
      "current_dir": "/usr/hdp/current/hive-webhcat/etc/webhcat"
    },
    {
      "conf_dir": "/etc/hive-hcatalog/conf",
      "prefix": "/etc/hive-hcatalog",
      "current_dir": "/usr/hdp/current/hive-webhcat/etc/hcatalog"
    }
  ]
}

def get_cmd(command, package, version):
  return ('conf-select', command, '--package', package, '--stack-version', version, '--conf-version', '0')
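# For example (hypothetical version number), get_cmd("create-conf-dir", "hadoop", "2.3.0.0-2557")
# builds ('conf-select', 'create-conf-dir', '--package', 'hadoop',
# '--stack-version', '2.3.0.0-2557', '--conf-version', '0'), the argument tuple
# that shell.call/checked_call below run with sudo.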

def _valid(stack_name, package, ver):
  if stack_name != "HDP":
    return False

  if version.compare_versions(version.format_hdp_stack_version(ver), "2.3.0.0") < 0:
    return False

  return True
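# For example, _valid("HDP", "hadoop", "2.2.0.0") is False (the stack is older
# than 2.3.0.0), _valid("HDP", "hadoop", "2.3.0.0-1234") is True, and any
# stack name other than "HDP" is rejected outright.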

def create(stack_name, package, version, dry_run = False):
  """
  Creates a config version for the specified package
  :param stack_name: the name of the stack
  :param package: the name of the package, as-used by conf-select
  :param version: the version number to create
  :param dry_run: if True, only report the directories without creating them
  :return: the list of directories reported by conf-select for this package
  """
  if not _valid(stack_name, package, version):
    return []

  command = "dry-run-create" if dry_run else "create-conf-dir"

  code, stdout = shell.call(get_cmd(command, package, version), logoutput=False, quiet=True, sudo=True)

  # conf-select can set more than one directory per package,
  # so return that list, especially for dry_run
  dirs = []
  if 0 == code and stdout is not None: # just be sure we have a stdout
    for line in stdout.splitlines():
      dirs.append(line.rstrip('\n'))

  # take care of permissions
  if not code and stdout and command == "create-conf-dir":
    for d in dirs:
      Directory(d,
          mode=0755,
          cd_access='a',
          recursive=True)

  return dirs
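# Example (hypothetical stack version): on an HDP 2.3 host,
#   create("HDP", "hadoop", "2.3.0.0-2557", dry_run = True)
# returns the directories conf-select would manage (e.g. /etc/hadoop/2.3.0.0-2557/0)
# without creating anything, while dry_run = False also creates them and fixes
# their permissions.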

def select(stack_name, package, version, try_create=True):
  """
  Selects a config version for the specified package. Currently only works if the
  version is for HDP-2.3 or higher.
  :param stack_name: the name of the stack
  :param package: the name of the package, as-used by conf-select
  :param version: the version number to select
  :param try_create: optional argument to attempt to create the directory before setting it
  """
  if not _valid(stack_name, package, version):
    return

  if try_create:
    create(stack_name, package, version)

  shell.checked_call(get_cmd("set-conf-dir", package, version), logoutput=False, quiet=False, sudo=True)
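# Example (hypothetical stack version): select("HDP", "hive", "2.3.0.0-2557")
# creates the versioned conf directory if needed, then runs
# "conf-select set-conf-dir" to repoint the package's conf symlinks at it.
# Unlike create(), a non-zero exit here raises, via checked_call.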

def get_hadoop_conf_dir(force_latest_on_upgrade=False):
  """
  Gets the shared hadoop conf directory using:
  1. Start with /etc/hadoop/conf
  2. When the stack is greater than HDP-2.2, use /usr/hdp/current/hadoop-client/conf
  3. Only when doing a RU and HDP-2.3 or higher, use the value as computed
     by conf-select. This is in the form /usr/hdp/VERSION/hadoop/conf to make sure
     the configs are written in the correct place. However, if the component itself has
     not yet been upgraded, it should use the hadoop configs from the prior version.
     This will perform an hdp-select status to determine which version to use.
  :param force_latest_on_upgrade: if True, then force the returned path to always
     be that of the upgrade target version, even if hdp-select has not been called. This
     is primarily used by hooks like before-ANY to ensure that hadoop environment
     configurations are written to the correct location since they are written out
     before the hdp-select/conf-select would have been called.
  """
  hadoop_conf_dir = "/etc/hadoop/conf"

  if Script.is_hdp_stack_greater_or_equal("2.2"):
    hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"

    stack_info = hdp_select._get_upgrade_stack()

    # if upgrading to >= HDP 2.3
    if stack_info is not None and Script.is_hdp_stack_greater_or_equal("2.3"):
      stack_name = stack_info[0]
      stack_version = stack_info[1]

      # determine if hdp-select has been run and if not, then use the current
      # hdp version until this component is upgraded
      if not force_latest_on_upgrade:
        current_hdp_version = hdp_select.get_role_component_current_hdp_version()
        if current_hdp_version is not None and stack_version != current_hdp_version:
          stack_version = current_hdp_version

      # only change the hadoop_conf_dir path, don't conf-select this older version
      hadoop_conf_dir = "/usr/hdp/{0}/hadoop/conf".format(stack_version)

      # ensure the new HDP stack is conf-selected, but only if it exists
      # there are cases where hadoop might not be installed, such as on a host with only ZK
      if os.path.exists(hadoop_conf_dir):
        select(stack_name, "hadoop", stack_version)

  return hadoop_conf_dir
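# Example: when an upgrade to HDP 2.3+ is in progress but hdp-select has not yet
# run for this component, the returned path points at the component's current
# (pre-upgrade) version, e.g. /usr/hdp/<current_hdp_version>/hadoop/conf;
# force_latest_on_upgrade=True skips that check and returns the upgrade target's
# path instead.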

def create_config_links(stack_id, stack_version):
  """
  Creates config links
  :param stack_id: stack id, ie HDP-2.3
  :param stack_version: version to set, ie 2.3.0.0-1234
  """
  if stack_id is None:
    Logger.info("Cannot create config links when stack_id is not defined")
    return

  args = stack_id.upper().split('-')
  if len(args) != 2:
    Logger.info("Unrecognized stack id {0}".format(stack_id))
    return

  if args[0] != "HDP":
    Logger.info("Unrecognized stack name {0}".format(args[0]))
    return

  if version.compare_versions(version.format_hdp_stack_version(args[1]), "2.3.0.0") < 0:
    Logger.info("Cannot link configs unless HDP-2.3 or higher")
    return

  for k, v in PACKAGE_DIRS.iteritems():
    dirs = create(args[0], k, stack_version, dry_run = True)
    if 0 == len(dirs):
      Logger.debug("Package {0} is not installed".format(k))
    else:
      need = False
      for new_conf_dir in dirs:
        if not os.path.exists(new_conf_dir):
          need = True

      if need:
        Logger.info("Creating conf dirs {0} for {1}".format(",".join(dirs), k))
        try:
          select(args[0], k, stack_version)
        except Exception, err:
          # don't ruin someone's day
          Logger.logger.exception("Conf-select set failed to link '{0}'. Error: {1}".format(k, str(err)))
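# Example, using the docstring's sample values: create_config_links("HDP-2.3", "2.3.0.0-1234")
# dry-runs conf-select for every package in PACKAGE_DIRS, skips packages that are
# not installed, and links the versioned conf directories for those whose
# directories do not yet exist; a failure for one package is logged and does not
# abort the loop.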