#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

__all__ = ["select", "create", "get_hadoop_conf_dir", "get_hadoop_dir", "get_package_dirs"]

# Python Imports
import copy
import os
import subprocess

# Local Imports
import version
import stack_select

from resource_management.core import shell
from resource_management.core.exceptions import Fail
from resource_management.core.logger import Logger
from resource_management.core.resources.system import Directory
from resource_management.core.resources.system import Execute
from resource_management.core.resources.system import Link
from resource_management.core.shell import as_sudo
from resource_management.libraries.functions import stack_tools
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.script.script import Script

STACK_ROOT_PATTERN = "{{ stack_root }}"
_PACKAGE_DIRS = {
  "accumulo": [
    {
      "conf_dir": "/etc/accumulo/conf",
      "current_dir": "{0}/current/accumulo-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "falcon": [
    {
      "conf_dir": "/etc/falcon/conf",
      "current_dir": "{0}/current/falcon-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "hadoop": [
    {
      "conf_dir": "/etc/hadoop/conf",
      "current_dir": "{0}/current/hadoop-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "hbase": [
    {
      "conf_dir": "/etc/hbase/conf",
      "current_dir": "{0}/current/hbase-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "hive": [
    {
      "conf_dir": "/etc/hive/conf",
      "current_dir": "{0}/current/hive-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "hive2": [
    {
      "conf_dir": "/etc/hive2/conf",
      "current_dir": "{0}/current/hive-server2-hive2/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "kafka": [
    {
      "conf_dir": "/etc/kafka/conf",
      "current_dir": "{0}/current/kafka-broker/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "knox": [
    {
      "conf_dir": "/etc/knox/conf",
      "current_dir": "{0}/current/knox-server/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "mahout": [
    {
      "conf_dir": "/etc/mahout/conf",
      "current_dir": "{0}/current/mahout-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "oozie": [
    {
      "conf_dir": "/etc/oozie/conf",
      "current_dir": "{0}/current/oozie-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "phoenix": [
    {
      "conf_dir": "/etc/phoenix/conf",
      "current_dir": "{0}/current/phoenix-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "ranger-admin": [
    {
      "conf_dir": "/etc/ranger/admin/conf",
      "current_dir": "{0}/current/ranger-admin/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "ranger-tagsync": [
    {
      "conf_dir": "/etc/ranger/tagsync/conf",
      "current_dir": "{0}/current/ranger-tagsync/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "ranger-kms": [
    {
      "conf_dir": "/etc/ranger/kms/conf",
      "current_dir": "{0}/current/ranger-kms/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "ranger-usersync": [
    {
      "conf_dir": "/etc/ranger/usersync/conf",
      "current_dir": "{0}/current/ranger-usersync/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "slider": [
    {
      "conf_dir": "/etc/slider/conf",
      "current_dir": "{0}/current/slider-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "spark": [
    {
      "conf_dir": "/etc/spark/conf",
      "current_dir": "{0}/current/spark-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "sqoop": [
    {
      "conf_dir": "/etc/sqoop/conf",
      "current_dir": "{0}/current/sqoop-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "storm": [
    {
      "conf_dir": "/etc/storm/conf",
      "current_dir": "{0}/current/storm-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "tez": [
    {
      "conf_dir": "/etc/tez/conf",
      "current_dir": "{0}/current/tez-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "zookeeper": [
    {
      "conf_dir": "/etc/zookeeper/conf",
      "current_dir": "{0}/current/zookeeper-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "pig": [
    {
      "conf_dir": "/etc/pig/conf",
      "current_dir": "{0}/current/pig-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "flume": [
    {
      "conf_dir": "/etc/flume/conf",
      "current_dir": "{0}/current/flume-server/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "storm-slider-client": [
    {
      "conf_dir": "/etc/storm-slider-client/conf",
      "current_dir": "{0}/current/storm-slider-client/conf".format(STACK_ROOT_PATTERN)
    }
  ],
  "hive-hcatalog": [
    {
      "conf_dir": "/etc/hive-webhcat/conf",
      "prefix": "/etc/hive-webhcat",
      "current_dir": "{0}/current/hive-webhcat/etc/webhcat".format(STACK_ROOT_PATTERN)
    },
    {
      "conf_dir": "/etc/hive-hcatalog/conf",
      "prefix": "/etc/hive-hcatalog",
      "current_dir": "{0}/current/hive-webhcat/etc/hcatalog".format(STACK_ROOT_PATTERN)
    }
  ]
}
def _get_cmd(command, package, version):
  conf_selector_path = stack_tools.get_stack_tool_path(stack_tools.CONF_SELECTOR_NAME)
  return ('ambari-python-wrap', conf_selector_path, command,
          '--package', package, '--stack-version', version, '--conf-version', '0')
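# For example (hypothetical values; the actual selector path comes from the
# stack tools configuration), _get_cmd("dry-run-create", "hadoop", "2.3.0.0-1234")
# would produce a tuple along the lines of:
#   ('ambari-python-wrap', '/usr/bin/conf-select', 'dry-run-create',
#    '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0')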
def _valid(stack_name, package, ver):
  return (ver and check_stack_feature(StackFeature.CONFIG_VERSIONING, ver))
def get_package_dirs():
  """
  Gets the package directory mappings, with the stack root pattern in each
  "current_dir" replaced by the actual stack root of this cluster.
  :return: a copy of _PACKAGE_DIRS with resolved "current_dir" values
  """
  stack_root = Script.get_stack_root()
  package_dirs = copy.deepcopy(_PACKAGE_DIRS)
  for package_name, directories in package_dirs.iteritems():
    for dir in directories:
      current_dir = dir['current_dir']
      current_dir = current_dir.replace(STACK_ROOT_PATTERN, stack_root)
      dir['current_dir'] = current_dir
  return package_dirs
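# As a hypothetical illustration, if the stack root were "/usr/hdp", then:
#   get_package_dirs()["hadoop"][0]
#   -> {"conf_dir": "/etc/hadoop/conf",
#       "current_dir": "/usr/hdp/current/hadoop-client/conf"}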
def create(stack_name, package, version, dry_run=False):
  """
  Creates a config version for the specified package.
  :param stack_name: the name of the stack
  :param package: the name of the package, as-used by <conf-selector-tool>
  :param version: the version number to create
  :param dry_run: False to create the versioned config directory, True to only
  return what would be created
  :return: the list of directories created
  """
  Logger.info("Checking if we need to create versioned conf dir /etc/{0}/{1}/0".format(package, version))
  if not _valid(stack_name, package, version):
    Logger.info("Will not create it since parameters are not valid.")
    return []

  command = "dry-run-create" if dry_run else "create-conf-dir"

  code, stdout, stderr = shell.call(_get_cmd(command, package, version),
    logoutput=False, quiet=False, sudo=True, stderr=subprocess.PIPE)

  # <conf-selector-tool> can set more than one directory per package,
  # so return that list, especially for dry_run:
  # > <conf-selector-tool> dry-run-create --package hive-hcatalog --stack-version 2.4.0.0-169 0
  # /etc/hive-webhcat/2.4.0.0-169/0
  # /etc/hive-hcatalog/2.4.0.0-169/0
  created_directories = []
  if 0 == code and stdout is not None:  # just be sure we have a stdout
    for line in stdout.splitlines():
      created_directories.append(line.rstrip('\n'))

  # if directories were created, then do some post-processing
  if not code and stdout and not dry_run:
    # take care of permissions if directories were created
    for directory in created_directories:
      Directory(directory, mode=0755, cd_access='a', create_parents=True)

    # seed the new directories with configurations from the old (current) directories
    _seed_new_configuration_directories(package, created_directories)

  return created_directories
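# A hypothetical dry run, mirroring the hive-hcatalog example in the comment
# above, would return the directories the tool reports without creating them:
#   create("HDP", "hive-hcatalog", "2.4.0.0-169", dry_run=True)
#   -> ["/etc/hive-webhcat/2.4.0.0-169/0", "/etc/hive-hcatalog/2.4.0.0-169/0"]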
def select(stack_name, package, version, try_create=True, ignore_errors=False):
  """
  Selects a config version for the specified package.
  :param stack_name: the name of the stack
  :param package: the name of the package, as-used by <conf-selector-tool>
  :param version: the version number to select
  :param try_create: optional argument to attempt to create the directory before setting it
  :param ignore_errors: optional argument to ignore any error and simply log a warning
  """
  try:
    if not _valid(stack_name, package, version):
      return

    if try_create:
      create(stack_name, package, version)

    shell.checked_call(_get_cmd("set-conf-dir", package, version), logoutput=False, quiet=False, sudo=True)

    # for consistency's sake, we must ensure that the /etc/<component>/conf symlink exists and
    # points to <stack-root>/current/<component>/conf - this is because some people still prefer to
    # use /etc/<component>/conf even though <stack-root> is the "future"
    package_dirs = get_package_dirs()
    if package in package_dirs:
      Logger.info("Ensuring that {0} has the correct symlink structure".format(package))

      directory_list = package_dirs[package]
      for directory_structure in directory_list:
        conf_dir = directory_structure["conf_dir"]
        current_dir = directory_structure["current_dir"]

        # if /etc/<component>/conf is not a symlink, we need to change it
        if not os.path.islink(conf_dir):
          # if it exists, try to back it up
          if os.path.exists(conf_dir):
            parent_directory = os.path.dirname(conf_dir)
            conf_install_dir = os.path.join(parent_directory, "conf.backup")

            Execute(("cp", "-R", "-p", conf_dir, conf_install_dir),
              not_if=format("test -e {conf_install_dir}"), sudo=True)

            Directory(conf_dir, action="delete")

          Link(conf_dir, to=current_dir)
  except Exception, exception:
    if ignore_errors is True:
      Logger.warning("Could not select the directory for package {0}. Error: {1}".format(
        package, str(exception)))
    else:
      raise
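# A hypothetical invocation that tolerates failure on hosts where the package
# may not be installed:
#   select("HDP", "zookeeper", "2.3.0.0-1234", ignore_errors=True)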
def get_hadoop_conf_dir(force_latest_on_upgrade=False):
  """
  Gets the shared hadoop conf directory using:
  1.  Start with /etc/hadoop/conf
  2.  When the stack is greater than HDP-2.2, use <stack-root>/current/hadoop-client/conf
  3.  Only when doing a RU and HDP-2.3 or higher, use the value as computed
      by <conf-selector-tool>. This is in the form <stack-root>/VERSION/hadoop/conf to make sure
      the configs are written in the correct place. However, if the component itself has
      not yet been upgraded, it should use the hadoop configs from the prior version.
      This will perform a <stack-selector-tool> status to determine which version to use.
  :param force_latest_on_upgrade: if True, then force the returned path to always
  be that of the upgrade target version, even if <stack-selector-tool> has not been
  called. This is primarily used by hooks like before-ANY to ensure that hadoop
  environment configurations are written to the correct location since they are
  written out before the <stack-selector-tool>/<conf-selector-tool> would have been called.
  """
  hadoop_conf_dir = "/etc/hadoop/conf"
  stack_name = None
  stack_root = Script.get_stack_root()
  stack_version = Script.get_stack_version()
  version = None
  allow_setting_conf_select_symlink = False

  if not Script.in_stack_upgrade():
    # During normal operation, the HDP stack must be 2.3 or higher
    if stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version):
      hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf")

    if stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version):
      hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf")
      stack_name = default("/hostLevelParams/stack_name", None)
      version = default("/commandParams/version", None)

      if stack_name and version:
        version = str(version)
        allow_setting_conf_select_symlink = True
  else:
    # During an upgrade/downgrade, which can be a Rolling or Express Upgrade,
    # we need to calculate it based on the version
    '''
    Whenever upgrading to HDP 2.2, or downgrading back to 2.2, need to use /etc/hadoop/conf
    Whenever upgrading to HDP 2.3, or downgrading back to 2.3, need to use a versioned hadoop conf dir

    Type__|_Source_|_Target_|_Direction_____________|_Comment_____________________________________________________________
    Normal|        | 2.2    |                       | Use /etc/hadoop/conf
    Normal|        | 2.3    |                       | Use /etc/hadoop/conf, which should be a symlink to <stack-root>/current/hadoop-client/conf
    EU    | 2.1    | 2.3    | Upgrade               | Use versioned <stack-root>/current/hadoop-client/conf
          |        |        | No Downgrade Allowed  | Invalid
    EU/RU | 2.2    | 2.2.*  | Any                   | Use <stack-root>/current/hadoop-client/conf
    EU/RU | 2.2    | 2.3    | Upgrade               | Use <stack-root>/$version/hadoop/conf, which should be a symlink destination
          |        |        | Downgrade             | Use <stack-root>/current/hadoop-client/conf
    EU/RU | 2.3    | 2.3.*  | Any                   | Use <stack-root>/$version/hadoop/conf, which should be a symlink destination
    '''
    # The "stack_version" is the desired stack, e.g., 2.2 or 2.3
    # In an RU, it is always the desired stack, and doesn't change even during the Downgrade!
    # In an RU Downgrade from HDP 2.3 to 2.2, the first thing we do is
    # rm /etc/[component]/conf and then mv /etc/[component]/conf.backup /etc/[component]/conf
    if stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version):
      hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf")

      # This contains the "version", including the build number, that is actually used
      # during a stack upgrade and is the version upgrading/downgrading to.
      stack_info = stack_select._get_upgrade_stack()

      if stack_info is not None:
        stack_name = stack_info[0]
        version = stack_info[1]
      else:
        raise Fail("Unable to get parameter 'version'")

      Logger.info("In the middle of a stack upgrade/downgrade for Stack {0} and destination version {1}, determining which hadoop conf dir to use.".format(stack_name, version))

      # This is the version either upgrading or downgrading to.
      if version and check_stack_feature(StackFeature.CONFIG_VERSIONING, version):
        # Determine if <stack-selector-tool> has been run and if not, then use the current
        # hdp version until this component is upgraded.
        if not force_latest_on_upgrade:
          current_stack_version = stack_select.get_role_component_current_stack_version()
          if current_stack_version is not None and version != current_stack_version:
            version = current_stack_version
            stack_selector_name = stack_tools.get_stack_tool_name(stack_tools.STACK_SELECTOR_NAME)
            Logger.info("{0} has not yet been called to update the symlink for this component, "
                        "keep using version {1}".format(stack_selector_name, current_stack_version))

        # Only change the hadoop_conf_dir path, don't <conf-selector-tool> this older version
        hadoop_conf_dir = os.path.join(stack_root, version, "hadoop", "conf")
        Logger.info("Hadoop conf dir: {0}".format(hadoop_conf_dir))

        allow_setting_conf_select_symlink = True

  if allow_setting_conf_select_symlink:
    # If not in the middle of an upgrade and on HDP 2.3 or higher, or if
    # upgrading stack to version 2.3.0.0 or higher (which may be upgrade or downgrade),
    # then consider setting the symlink for /etc/hadoop/conf.
    # If a host does not have any HDFS or YARN components (e.g., only ZK), then it will not
    # contain /etc/hadoop/conf; therefore, any calls to <conf-selector-tool> will fail.
    # For that reason, if the hadoop conf directory exists, then make sure it is set.
    if os.path.exists(hadoop_conf_dir):
      conf_selector_name = stack_tools.get_stack_tool_name(stack_tools.CONF_SELECTOR_NAME)
      Logger.info("The hadoop conf dir {0} exists, will call {1} on it for version {2}".format(
        hadoop_conf_dir, conf_selector_name, version))
      select(stack_name, "hadoop", version)

  Logger.info("Using hadoop conf dir: {0}".format(hadoop_conf_dir))
  return hadoop_conf_dir
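# Depending on the scenario described in the docstring above, the returned path
# would be one of the following (shown with a hypothetical stack root of
# "/usr/hdp" and version 2.3.0.0-1234):
#   /etc/hadoop/conf                       (stack without rolling-upgrade support)
#   /usr/hdp/current/hadoop-client/conf    (normal operation on HDP 2.2+)
#   /usr/hdp/2.3.0.0-1234/hadoop/conf      (versioned dir during an HDP 2.3+ upgrade)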
def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_links=True, link_to="current"):
  """
  Assumes HDP 2.3+, moves around directories and creates the conf symlink for the given package.
  If the package does not exist, then no work is performed.

  - Creates a /etc/<component>/conf.backup directory
  - Copies all configs from /etc/<component>/conf to conf.backup
  - Removes /etc/<component>/conf
  - Creates /etc/<component>/<version>/0 via <conf-selector-tool>
  - <stack-root>/current/<component>-client/conf -> /etc/<component>/<version>/0 via <conf-selector-tool>
  - Links /etc/<component>/conf to <something> depending on the function parameter
  -- /etc/<component>/conf -> <stack-root>/current/[component]-client/conf (usually)
  -- /etc/<component>/conf -> /etc/<component>/conf.backup (only when supporting < HDP 2.3)

  :param package: the package to create symlinks for (zookeeper, falcon, etc)
  :param version: the version number to use with <conf-selector-tool> (2.3.0.0-1234)
  :param dirs: the directories associated with the package (from get_package_dirs())
  :param skip_existing_links: True to not do any work if already a symlink
  :param link_to: link to "current" or "backup"
  """
  stack_name = Script.get_stack_name()
  bad_dirs = []
  for dir_def in dirs:
    if not os.path.exists(dir_def['conf_dir']):
      bad_dirs.append(dir_def['conf_dir'])

  if len(bad_dirs) > 0:
    Logger.info("Skipping {0} as it does not exist.".format(",".join(bad_dirs)))
    return

  # existing links should be skipped since we assume there's no work to do
  if skip_existing_links:
    bad_dirs = []
    for dir_def in dirs:
      # check if conf is a link already
      old_conf = dir_def['conf_dir']
      if os.path.islink(old_conf):
        Logger.info("{0} is already linked to {1}".format(old_conf, os.path.realpath(old_conf)))
        bad_dirs.append(old_conf)

  if len(bad_dirs) > 0:
    return

  # make backup dir and copy everything in case configure() was called after install()
  backup_dir = None
  for dir_def in dirs:
    old_conf = dir_def['conf_dir']
    old_parent = os.path.abspath(os.path.join(old_conf, os.pardir))
    backup_dir = os.path.join(old_parent, "conf.backup")
    Logger.info("Backing up {0} to {1} if destination doesn't exist already.".format(old_conf, backup_dir))
    Execute(("cp", "-R", "-p", old_conf, backup_dir),
      not_if=format("test -e {backup_dir}"), sudo=True)

  # we're already in the HDP stack
  # Create the versioned /etc/[component]/[version]/0 folder.
  # The component must be installed on the host.
  versioned_confs = create(stack_name, package, version, dry_run=True)

  Logger.info("Package {0} will have new conf directories: {1}".format(package, ", ".join(versioned_confs)))

  need_dirs = []
  for d in versioned_confs:
    if not os.path.exists(d):
      need_dirs.append(d)

  if len(need_dirs) > 0:
    create(stack_name, package, version)

    # find the matching definition and back it up (not the most efficient way)
    # ONLY if there is more than one directory
    if len(dirs) > 1:
      for need_dir in need_dirs:
        for dir_def in dirs:
          if 'prefix' in dir_def and need_dir.startswith(dir_def['prefix']):
            old_conf = dir_def['conf_dir']
            versioned_conf = need_dir
            Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
              only_if=format("ls -d {old_conf}/*"))
    elif 1 == len(dirs) and 1 == len(need_dirs):
      old_conf = dirs[0]['conf_dir']
      versioned_conf = need_dirs[0]
      Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
        only_if=format("ls -d {old_conf}/*"))

  # <stack-root>/current/[component] is already set to the correct version,
  # e.g., <stack-root>/[version]/[component]
  select(stack_name, package, version, ignore_errors=True)

  # Symlink /etc/[component]/conf to /etc/[component]/conf.backup
  try:
    # No more references to /etc/[component]/conf
    for dir_def in dirs:
      # E.g., /etc/[component]/conf
      new_symlink = dir_def['conf_dir']

      # Remove new_symlink to pave the way, but only if it's a directory
      if not os.path.islink(new_symlink):
        Directory(new_symlink, action="delete")

      if link_to in ["current", "backup"]:
        # link /etc/[component]/conf -> <stack-root>/current/[component]-client/conf
        if link_to == "backup":
          Link(new_symlink, to=backup_dir)
        else:
          Link(new_symlink, to=dir_def['current_dir'])
      else:
        Logger.error("Unsupported 'link_to' argument. Could not link package {0}".format(package))
  except Exception, e:
    Logger.warning("Could not change symlink for package {0} to point to {1} directory. Error: {2}".format(package, link_to, e))
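# After a successful conversion with link_to="current", a single-directory
# package such as zookeeper would end up with roughly this layout (assuming a
# hypothetical stack root of "/usr/hdp" and version 2.3.0.0-1234):
#   /etc/zookeeper/conf            -> /usr/hdp/current/zookeeper-client/conf
#   /etc/zookeeper/conf.backup        copy of the original configs
#   /etc/zookeeper/2.3.0.0-1234/0     versioned configs created by <conf-selector-tool>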
def _seed_new_configuration_directories(package, created_directories):
  """
  Copies any files from the "current" configuration directory to the directories which were
  newly created with <conf-selector-tool>. This function helps ensure that files which are not
  tracked by Ambari will be available after performing a stack upgrade. Although old
  configurations will be copied as well, they will be overwritten when the components write
  out their configs after upgrade during their restart.

  This function will catch all errors, logging them, but not raising an exception. This is to
  prevent problems here from stopping an otherwise healthy upgrade.

  :param package: the <conf-selector-tool> package name
  :param created_directories: a list of directories that <conf-selector-tool> said it created
  :return: None
  """
  package_dirs = get_package_dirs()
  if package not in package_dirs:
    Logger.warning("Unable to seed newly created configuration directories for {0} because it is an unknown component".format(package))
    return

  # seed the directories with any existing configurations
  # this allows files which are not tracked by Ambari to be available after an upgrade
  Logger.info("Seeding versioned configuration directories for {0}".format(package))
  expected_directories = package_dirs[package]

  try:
    # if the expected directories don't match those created, we can't seed them
    if len(created_directories) != len(expected_directories):
      Logger.warning("The known configuration directories for {0} do not match those created by conf-select: {1}".format(
        package, str(created_directories)))
      return

    # short circuit for a simple 1:1 mapping
    if len(expected_directories) == 1:
      # <stack-root>/current/component/conf
      # the current directory is the source of the seeded configurations
      source_seed_directory = expected_directories[0]["current_dir"]
      target_seed_directory = created_directories[0]
      _copy_configurations(source_seed_directory, target_seed_directory)
    else:
      for created_directory in created_directories:
        for expected_directory_structure in expected_directories:
          prefix = expected_directory_structure.get("prefix", None)
          if prefix is not None and created_directory.startswith(prefix):
            source_seed_directory = expected_directory_structure["current_dir"]
            target_seed_directory = created_directory
            _copy_configurations(source_seed_directory, target_seed_directory)
  except Exception, e:
    Logger.warning("Unable to seed new configuration directories for {0}. {1}".format(package, str(e)))
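# For multi-directory packages, the "prefix" key drives the matching. Using the
# hive-hcatalog entries from _PACKAGE_DIRS, a created directory of
# "/etc/hive-webhcat/2.4.0.0-169/0" starts with the prefix "/etc/hive-webhcat",
# so it would be seeded from that entry's "current_dir".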
def _copy_configurations(source_directory, target_directory):
  """
  Copies from the source directory to the target directory. If the source directory is a symlink
  then it will be followed (dereferenced), but any other symlinks found to copy will not be. This
  ensures that if the configuration directory itself is a symlink, then its contents will be
  copied, preserving any children found which are also symlinks.

  :param source_directory: the source directory to copy from
  :param target_directory: the target directory to copy to
  :return: None
  """
  # append a wildcard so the cp command copies the directory's contents
  # (dereferencing the directory itself but preserving child symlinks)
  source_directory = os.path.join(source_directory, "*")
  Execute(as_sudo(["cp", "-R", "-p", "-v", source_directory, target_directory], auto_escape=False),
    logoutput=True)
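# With auto_escape disabled, the shell expands the wildcard, so the resulting
# command is roughly equivalent to the following invocation run via sudo
# (hypothetical paths):
#   cp -R -p -v /usr/hdp/current/hadoop-client/conf/* /etc/hadoop/2.3.0.0-1234/0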