- #!/usr/bin/env python
- """
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- """
- __all__ = ["select", "create", "get_hadoop_conf_dir", "get_hadoop_dir", "get_package_dirs"]
- import copy
- import os
- import version
- import stack_select
- import subprocess
- from resource_management.core import shell
- from resource_management.libraries.functions.format import format
- from resource_management.libraries.script.script import Script
- from resource_management.core.logger import Logger
- from resource_management.core.resources.system import Directory
- from resource_management.core.resources.system import Execute
- from resource_management.core.resources.system import Link
- from resource_management.libraries.functions.default import default
- from resource_management.libraries.functions import stack_tools
- from resource_management.core.exceptions import Fail
- from resource_management.core.shell import as_sudo
- from resource_management.libraries.functions.stack_features import check_stack_feature
- from resource_management.libraries.functions import StackFeature
- STACK_ROOT_PATTERN = "{{ stack_root }}"
- _PACKAGE_DIRS = {
- "accumulo": [
- {
- "conf_dir": "/etc/accumulo/conf",
- "current_dir": "{0}/current/accumulo-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "falcon": [
- {
- "conf_dir": "/etc/falcon/conf",
- "current_dir": "{0}/current/falcon-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "hadoop": [
- {
- "conf_dir": "/etc/hadoop/conf",
- "current_dir": "{0}/current/hadoop-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "hbase": [
- {
- "conf_dir": "/etc/hbase/conf",
- "current_dir": "{0}/current/hbase-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "hive": [
- {
- "conf_dir": "/etc/hive/conf",
- "current_dir": "{0}/current/hive-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "hive2": [
- {
- "conf_dir": "/etc/hive2/conf",
- "current_dir": "{0}/current/hive-server2-hive2/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "kafka": [
- {
- "conf_dir": "/etc/kafka/conf",
- "current_dir": "{0}/current/kafka-broker/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "knox": [
- {
- "conf_dir": "/etc/knox/conf",
- "current_dir": "{0}/current/knox-server/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "mahout": [
- {
- "conf_dir": "/etc/mahout/conf",
- "current_dir": "{0}/current/mahout-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "oozie": [
- {
- "conf_dir": "/etc/oozie/conf",
- "current_dir": "{0}/current/oozie-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "phoenix": [
- {
- "conf_dir": "/etc/phoenix/conf",
- "current_dir": "{0}/current/phoenix-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "ranger-admin": [
- {
- "conf_dir": "/etc/ranger/admin/conf",
- "current_dir": "{0}/current/ranger-admin/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "ranger-tagsync": [
- {
- "conf_dir": "/etc/ranger/tagsync/conf",
- "current_dir": "{0}/current/ranger-tagsync/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "ranger-kms": [
- {
- "conf_dir": "/etc/ranger/kms/conf",
- "current_dir": "{0}/current/ranger-kms/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "ranger-usersync": [
- {
- "conf_dir": "/etc/ranger/usersync/conf",
- "current_dir": "{0}/current/ranger-usersync/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "slider": [
- {
- "conf_dir": "/etc/slider/conf",
- "current_dir": "{0}/current/slider-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "spark": [
- {
- "conf_dir": "/etc/spark/conf",
- "current_dir": "{0}/current/spark-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "sqoop": [
- {
- "conf_dir": "/etc/sqoop/conf",
- "current_dir": "{0}/current/sqoop-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "storm": [
- {
- "conf_dir": "/etc/storm/conf",
- "current_dir": "{0}/current/storm-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "tez": [
- {
- "conf_dir": "/etc/tez/conf",
- "current_dir": "{0}/current/tez-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "zookeeper": [
- {
- "conf_dir": "/etc/zookeeper/conf",
- "current_dir": "{0}/current/zookeeper-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "pig": [
- {
- "conf_dir": "/etc/pig/conf",
- "current_dir": "{0}/current/pig-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "flume": [
- {
- "conf_dir": "/etc/flume/conf",
- "current_dir": "{0}/current/flume-server/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "storm-slider-client": [
- {
- "conf_dir": "/etc/storm-slider-client/conf",
- "current_dir": "{0}/current/storm-slider-client/conf".format(STACK_ROOT_PATTERN)
- }
- ],
- "hive-hcatalog": [
- {
- "conf_dir": "/etc/hive-webhcat/conf",
- "prefix": "/etc/hive-webhcat",
- "current_dir": "{0}/current/hive-webhcat/etc/webhcat".format(STACK_ROOT_PATTERN)
- },
- {
- "conf_dir": "/etc/hive-hcatalog/conf",
- "prefix": "/etc/hive-hcatalog",
- "current_dir": "{0}/current/hive-webhcat/etc/hcatalog".format(STACK_ROOT_PATTERN)
- }
- ]
- }
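- # Note on the structure above: keys are the package names understood by the
- # <conf-selector-tool>, and each key maps to a list of directory definitions so
- # that one package can own several conf directories (see "hive-hcatalog"). The
- # optional "prefix" key is only needed for multi-directory packages; the symlink
- # and seeding code below uses it to match a created directory back to its definition.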
- def _get_cmd(command, package, version):
- conf_selector_path = stack_tools.get_stack_tool_path(stack_tools.CONF_SELECTOR_NAME)
- return ('ambari-python-wrap', conf_selector_path, command, '--package', package, '--stack-version', version, '--conf-version', '0')
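- # Illustrative example (not executed): assuming the conf selector resolves to
- # /usr/bin/conf-select, _get_cmd("create-conf-dir", "hadoop", "2.4.0.0-169") returns:
- #   ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir',
- #    '--package', 'hadoop', '--stack-version', '2.4.0.0-169', '--conf-version', '0')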
- def _valid(stack_name, package, ver):
- return (ver and check_stack_feature(StackFeature.CONFIG_VERSIONING, ver))
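- # Note: _valid currently keys off the version alone; stack_name and package are
- # accepted for call-site symmetry but are not consulted.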
- def get_package_dirs():
- """
- Get the package directory mappings with the stack-root pattern resolved.
- :return: a deep copy of _PACKAGE_DIRS in which each "current_dir" points under the actual stack root
- """
- stack_root = Script.get_stack_root()
- package_dirs = copy.deepcopy(_PACKAGE_DIRS)
- for package_name, directories in package_dirs.iteritems():
- for dir_def in directories:
- current_dir = dir_def['current_dir']
- current_dir = current_dir.replace(STACK_ROOT_PATTERN, stack_root)
- dir_def['current_dir'] = current_dir
- return package_dirs
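- # Illustrative example (not executed): assuming Script.get_stack_root() returns
- # "/usr/hdp", get_package_dirs()["hadoop"] would be:
- #   [{"conf_dir": "/etc/hadoop/conf",
- #     "current_dir": "/usr/hdp/current/hadoop-client/conf"}]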
- def create(stack_name, package, version, dry_run = False):
- """
- Creates a config version for the specified package
- :param stack_name: the name of the stack
- :param package: the name of the package, as-used by <conf-selector-tool>
- :param version: the version number to create
- :param dry_run: False to create the versioned config directory, True to only return what would be created
- :return: the list of directories created
- """
- Logger.info("Checking if need to create versioned conf dir /etc/{0}/{1}/0".format(package, version))
- if not _valid(stack_name, package, version):
- Logger.info("Will not create it since parameters are not valid.")
- return []
- command = "dry-run-create" if dry_run else "create-conf-dir"
- code, stdout, stderr = shell.call(_get_cmd(command, package, version), logoutput=False, quiet=False, sudo=True, stderr = subprocess.PIPE)
- # <conf-selector-tool> can set more than one directory
- # per package, so return that list, especially for dry_run
- # > <conf-selector-tool> dry-run-create --package hive-hcatalog --stack-version 2.4.0.0-169 0
- # /etc/hive-webhcat/2.4.0.0-169/0
- # /etc/hive-hcatalog/2.4.0.0-169/0
- created_directories = []
- if 0 == code and stdout is not None: # just be sure we have a stdout
- for line in stdout.splitlines():
- created_directories.append(line.rstrip('\n'))
- # if directories were created, then do some post-processing
- if not code and stdout and not dry_run:
- # take care of permissions if directories were created
- for directory in created_directories:
- Directory(directory, mode=0755, cd_access='a', create_parents=True)
- # seed the new directories with configurations from the old (current) directories
- _seed_new_configuration_directories(package, created_directories)
- return created_directories
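- # Illustrative example (not executed), mirroring the dry-run sample output in the
- # comment above; "HDP" is a hypothetical stack name used only for this sketch:
- #   create("HDP", "hive-hcatalog", "2.4.0.0-169", dry_run = True)
- #   # -> ['/etc/hive-webhcat/2.4.0.0-169/0', '/etc/hive-hcatalog/2.4.0.0-169/0']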
- def select(stack_name, package, version, try_create=True, ignore_errors=False):
- """
- Selects a config version for the specified package.
- :param stack_name: the name of the stack
- :param package: the name of the package, as-used by <conf-selector-tool>
- :param version: the version number to select
- :param try_create: optional argument to attempt to create the directory before setting it
- :param ignore_errors: optional argument to ignore any error and simply log a warning
- """
- try:
- if not _valid(stack_name, package, version):
- return
- if try_create:
- create(stack_name, package, version)
- shell.checked_call(_get_cmd("set-conf-dir", package, version), logoutput=False, quiet=False, sudo=True)
- # for consistency's sake, we must ensure that the /etc/<component>/conf symlink exists and
- # points to <stack-root>/current/<component>/conf - this is because some people still prefer to
- # use /etc/<component>/conf even though <stack-root> is the "future"
- package_dirs = get_package_dirs()
- if package in package_dirs:
- Logger.info("Ensuring that {0} has the correct symlink structure".format(package))
- directory_list = package_dirs[package]
- for directory_structure in directory_list:
- conf_dir = directory_structure["conf_dir"]
- current_dir = directory_structure["current_dir"]
- # if /etc/<component>/conf is not a symlink, we need to change it
- if not os.path.islink(conf_dir):
- # if it exists, try to back it up
- if os.path.exists(conf_dir):
- parent_directory = os.path.dirname(conf_dir)
- conf_install_dir = os.path.join(parent_directory, "conf.backup")
- Execute(("cp", "-R", "-p", conf_dir, conf_install_dir),
- not_if = format("test -e {conf_install_dir}"), sudo = True)
- Directory(conf_dir, action="delete")
- Link(conf_dir, to = current_dir)
- except Exception, exception:
- if ignore_errors is True:
- Logger.warning("Could not select the directory for package {0}. Error: {1}".format(package,
- str(exception)))
- else:
- raise
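- # Illustrative example (not executed): select the hadoop conf version while
- # tolerating failures on hosts where the package is absent; the stack name and
- # version are hypothetical values for this sketch:
- #   select("HDP", "hadoop", "2.4.0.0-169", ignore_errors = True)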
- def get_hadoop_conf_dir(force_latest_on_upgrade=False):
- """
- Gets the shared hadoop conf directory using:
- 1. Start with /etc/hadoop/conf
- 2. When the stack is greater than HDP-2.2, use <stack-root>/current/hadoop-client/conf
- 3. Only when doing an RU on HDP-2.3 or higher, use the value as computed
- by <conf-selector-tool>. This is in the form <stack-root>/VERSION/hadoop/conf to make sure
- the configs are written in the correct place. However, if the component itself has
- not yet been upgraded, it should use the hadoop configs from the prior version.
- This will perform a <stack-selector-tool> status call to determine which version to use.
- :param force_latest_on_upgrade: if True, then force the returned path to always
- be that of the upgrade target version, even if <stack-selector-tool> has not been called. This
- is primarily used by hooks like before-ANY to ensure that hadoop environment
- configurations are written to the correct location since they are written out
- before the <stack-selector-tool>/<conf-selector-tool> would have been called.
- """
- hadoop_conf_dir = "/etc/hadoop/conf"
- stack_name = None
- stack_root = Script.get_stack_root()
- stack_version = Script.get_stack_version()
- version = None
- allow_setting_conf_select_symlink = False
- if not Script.in_stack_upgrade():
- # During normal operation, point at <stack-root>/current/hadoop-client/conf on stacks that support it (HDP-2.2+)
- if stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version):
- hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf")
- if stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version):
- hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf")
- stack_name = default("/hostLevelParams/stack_name", None)
- version = default("/commandParams/version", None)
- if stack_name and version:
- version = str(version)
- allow_setting_conf_select_symlink = True
- else:
- # During an upgrade/downgrade, which can be a Rolling or Express Upgrade, need to calculate it based on the version
- '''
- Whenever upgrading to HDP 2.2, or downgrading back to 2.2, need to use /etc/hadoop/conf
- Whenever upgrading to HDP 2.3, or downgrading back to 2.3, need to use a versioned hadoop conf dir
- Type__|_Source_|_Target_|_Direction_____________|_Comment_____________________________________________________________
- Normal| | 2.2 | | Use /etc/hadoop/conf
- Normal| | 2.3 | | Use /etc/hadoop/conf, which should be a symlink to <stack-root>/current/hadoop-client/conf
- EU | 2.1 | 2.3 | Upgrade | Use versioned <stack-root>/current/hadoop-client/conf
- | | | No Downgrade Allowed | Invalid
- EU/RU | 2.2 | 2.2.* | Any | Use <stack-root>/current/hadoop-client/conf
- EU/RU | 2.2 | 2.3 | Upgrade | Use <stack-root>/$version/hadoop/conf, which should be a symlink destination
- | | | Downgrade | Use <stack-root>/current/hadoop-client/conf
- EU/RU | 2.3 | 2.3.* | Any | Use <stack-root>/$version/hadoop/conf, which should be a symlink destination
- '''
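- # Worked example of the table above (hypothetical version number): an EU/RU
- # upgrade from 2.2 to 2.3.0.0-1234 resolves to <stack-root>/2.3.0.0-1234/hadoop/conf,
- # while the corresponding downgrade falls back to <stack-root>/current/hadoop-client/conf.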
- # The "stack_version" is the desired stack, e.g., 2.2 or 2.3
- # In an RU, it is always the desired stack, and doesn't change even during the Downgrade!
- # In an RU Downgrade from HDP 2.3 to 2.2, the first thing we do is
- # rm /etc/[component]/conf and then mv /etc/[component]/conf.backup /etc/[component]/conf
- if stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version):
- hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf")
- # This contains the "version", including the build number, that is actually used during a stack upgrade and
- # is the version upgrading/downgrading to.
- stack_info = stack_select._get_upgrade_stack()
- if stack_info is not None:
- stack_name = stack_info[0]
- version = stack_info[1]
- else:
- raise Fail("Unable to get parameter 'version'")
-
- Logger.info("In the middle of a stack upgrade/downgrade for Stack {0} and destination version {1}, determining which hadoop conf dir to use.".format(stack_name, version))
- # This is the version either upgrading or downgrading to.
- if version and check_stack_feature(StackFeature.CONFIG_VERSIONING, version):
- # Determine if <stack-selector-tool> has been run and if not, then use the current
- # hdp version until this component is upgraded.
- if not force_latest_on_upgrade:
- current_stack_version = stack_select.get_role_component_current_stack_version()
- if current_stack_version is not None and version != current_stack_version:
- version = current_stack_version
- stack_selector_name = stack_tools.get_stack_tool_name(stack_tools.STACK_SELECTOR_NAME)
- Logger.info("{0} has not yet been called to update the symlink for this component, "
- "keep using version {1}".format(stack_selector_name, current_stack_version))
- # Only change the hadoop_conf_dir path, don't <conf-selector-tool> this older version
- hadoop_conf_dir = os.path.join(stack_root, version, "hadoop", "conf")
- Logger.info("Hadoop conf dir: {0}".format(hadoop_conf_dir))
- allow_setting_conf_select_symlink = True
- if allow_setting_conf_select_symlink:
- # If not in the middle of an upgrade and on HDP 2.3 or higher, or if
- # upgrading stack to version 2.3.0.0 or higher (which may be upgrade or downgrade), then consider setting the
- # symlink for /etc/hadoop/conf.
- # If a host does not have any HDFS or YARN components (e.g., only ZK), then it will not contain /etc/hadoop/conf
- # Therefore, any calls to <conf-selector-tool> will fail.
- # For that reason, if the hadoop conf directory exists, then make sure it is set.
- if os.path.exists(hadoop_conf_dir):
- conf_selector_name = stack_tools.get_stack_tool_name(stack_tools.CONF_SELECTOR_NAME)
- Logger.info("The hadoop conf dir {0} exists, will call {1} on it for version {2}".format(
- hadoop_conf_dir, conf_selector_name, version))
- select(stack_name, "hadoop", version)
- Logger.info("Using hadoop conf dir: {0}".format(hadoop_conf_dir))
- return hadoop_conf_dir
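- # Illustrative example (not executed): a before-ANY hook that must write the
- # hadoop environment before the selector tools have run can force the target version:
- #   hadoop_conf_dir = get_hadoop_conf_dir(force_latest_on_upgrade=True)
- # Per the docstring above, this returns the upgrade target's conf path even if
- # <stack-selector-tool> has not yet been called for this component.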
- def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_links=True, link_to="current"):
- """
- Assumes HDP 2.3+, moves around directories and creates the conf symlink for the given package.
- If the package does not exist, then no work is performed.
- - Creates a /etc/<component>/conf.backup directory
- - Copies all configs from /etc/<component>/conf to conf.backup
- - Removes /etc/<component>/conf
- - Creates /etc/<component>/<version>/0 via <conf-selector-tool>
- - <stack-root>/current/<component>-client/conf -> /etc/<component>/<version>/0 via <conf-selector-tool>
- - Links /etc/<component>/conf to <something>, depending on the link_to parameter
- -- /etc/<component>/conf -> <stack-root>/current/[component]-client/conf (usually)
- -- /etc/<component>/conf -> /etc/<component>/conf.backup (only when supporting < HDP 2.3)
- :param package: the package to create symlinks for (zookeeper, falcon, etc)
- :param version: the version number to use with <conf-selector-tool> (2.3.0.0-1234)
- :param dirs: the directories associated with the package (from get_package_dirs())
- :param skip_existing_links: True to skip the work when the conf directory is already a symlink
- :param link_to: link to "current" or "backup"
- """
- stack_name = Script.get_stack_name()
- bad_dirs = []
- for dir_def in dirs:
- if not os.path.exists(dir_def['conf_dir']):
- bad_dirs.append(dir_def['conf_dir'])
- if len(bad_dirs) > 0:
- Logger.info("Skipping {0} as it does not exist.".format(",".join(bad_dirs)))
- return
- # existing links should be skipped since we assume there's no work to do
- if skip_existing_links:
- bad_dirs = []
- for dir_def in dirs:
- # check if conf is a link already
- old_conf = dir_def['conf_dir']
- if os.path.islink(old_conf):
- Logger.info("{0} is already linked to {1}".format(old_conf, os.path.realpath(old_conf)))
- bad_dirs.append(old_conf)
- if len(bad_dirs) > 0:
- return
- # make backup dir and copy everything in case configure() was called after install()
- backup_dir = None
- for dir_def in dirs:
- old_conf = dir_def['conf_dir']
- old_parent = os.path.abspath(os.path.join(old_conf, os.pardir))
- backup_dir = os.path.join(old_parent, "conf.backup")
- Logger.info("Backing up {0} to {1} if destination doesn't exist already.".format(old_conf, backup_dir))
- Execute(("cp", "-R", "-p", old_conf, backup_dir),
- not_if = format("test -e {backup_dir}"), sudo = True)
- # we're already in the HDP stack
- # Create the versioned /etc/[component]/[version]/0 folder.
- # The component must be installed on the host.
- versioned_confs = create(stack_name, package, version, dry_run = True)
- Logger.info("Package {0} will have new conf directories: {1}".format(package, ", ".join(versioned_confs)))
- need_dirs = []
- for d in versioned_confs:
- if not os.path.exists(d):
- need_dirs.append(d)
- if len(need_dirs) > 0:
- create(stack_name, package, version)
- # find the matching definition and back it up (not the most efficient way) ONLY if there is more than one directory
- if len(dirs) > 1:
- for need_dir in need_dirs:
- for dir_def in dirs:
- if 'prefix' in dir_def and need_dir.startswith(dir_def['prefix']):
- old_conf = dir_def['conf_dir']
- versioned_conf = need_dir
- Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
- only_if = format("ls -d {old_conf}/*"))
- elif 1 == len(dirs) and 1 == len(need_dirs):
- old_conf = dirs[0]['conf_dir']
- versioned_conf = need_dirs[0]
- Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
- only_if = format("ls -d {old_conf}/*"))
- # <stack-root>/current/[component] is already set to the correct version, e.g., <stack-root>/[version]/[component]
-
- select(stack_name, package, version, ignore_errors = True)
- # Symlink /etc/[component]/conf to the destination selected by link_to ("current" or "backup")
- try:
- # No more references to /etc/[component]/conf
- for dir_def in dirs:
- # E.g., /etc/[component]/conf
- new_symlink = dir_def['conf_dir']
- # Remove new_symlink to pave the way, but only if it's a directory
- if not os.path.islink(new_symlink):
- Directory(new_symlink, action="delete")
- if link_to in ["current", "backup"]:
- # link /etc/[component]/conf -> <stack-root>/current/[component]-client/conf
- if link_to == "backup":
- Link(new_symlink, to = backup_dir)
- else:
- Link(new_symlink, to = dir_def['current_dir'])
- else:
- Logger.error("Unsupported 'link_to' argument. Could not link package {0}".format(package))
- except Exception, e:
- Logger.warning("Could not change symlink for package {0} to point to {1} directory. Error: {2}".format(package, link_to, e))
- def _seed_new_configuration_directories(package, created_directories):
- """
- Copies any files from the "current" configuration directory to the directories which were
- newly created with <conf-selector-tool>. This function helps ensure that files which are not tracked
- by Ambari will be available after performing a stack upgrade. Although old configurations
- will be copied as well, they will be overwritten when the components are writing out their
- configs after upgrade during their restart.
- This function will catch all errors, logging them, but not raising an exception. This is to
- prevent problems here from stopping an otherwise healthy upgrade.
- :param package: the <conf-selector-tool> package name
- :param created_directories: a list of directories that <conf-selector-tool> said it created
- :return: None
- """
- package_dirs = get_package_dirs()
- if package not in package_dirs:
- Logger.warning("Unable to seed newly created configuration directories for {0} because it is an unknown component".format(package))
- return
- # seed the directories with any existing configurations
- # this allows files which are not tracked by Ambari to be available after an upgrade
- Logger.info("Seeding versioned configuration directories for {0}".format(package))
- expected_directories = package_dirs[package]
- try:
- # if the expected directories don't match those created, we can't seed them
- if len(created_directories) != len(expected_directories):
- Logger.warning("The known configuration directories for {0} do not match those created by conf-select: {1}".format(
- package, str(created_directories)))
- return
- # short circuit for a simple 1:1 mapping
- if len(expected_directories) == 1:
- # <stack-root>/current/component/conf
- # the current directory is the source of the seeded configurations;
- source_seed_directory = expected_directories[0]["current_dir"]
- target_seed_directory = created_directories[0]
- _copy_configurations(source_seed_directory, target_seed_directory)
- else:
- for created_directory in created_directories:
- for expected_directory_structure in expected_directories:
- prefix = expected_directory_structure.get("prefix", None)
- if prefix is not None and created_directory.startswith(prefix):
- source_seed_directory = expected_directory_structure["current_dir"]
- target_seed_directory = created_directory
- _copy_configurations(source_seed_directory, target_seed_directory)
- except Exception, e:
- Logger.warning("Unable to seed new configuration directories for {0}. {1}".format(package, str(e)))
- def _copy_configurations(source_directory, target_directory):
- """
- Copies from the source directory to the target directory. If the source directory is a symlink,
- it will be followed (dereferenced), but any other symlinks encountered during the copy will not be.
- This ensures that if the configuration directory itself is a symlink, its contents will be
- copied, while preserving any children which are themselves symlinks.
- :param source_directory: the source directory to copy from
- :param target_directory: the target directory to copy to
- :return: None
- """
- # append trailing slash so the cp command works correctly WRT recursion and symlinks
- source_directory = os.path.join(source_directory, "*")
- Execute(as_sudo(["cp", "-R", "-p", "-v", source_directory, target_directory], auto_escape = False),
- logoutput = True)
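- # Illustrative example (not executed): for hadoop, and assuming a stack root of
- # /usr/hdp with a hypothetical version 2.4.0.0-169, the Execute above expands to
- # roughly:
- #   sudo cp -R -p -v /usr/hdp/current/hadoop-client/conf/* /etc/hadoop/2.4.0.0-169/0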
|