
AMBARI-14543 - Adding A Removed Host Back To a Cluster Post-Upgrade Does Not Install New Stacks Correctly (part2) (jonathanhurley)

Jonathan Hurley, 9 years ago
Parent
Commit
f48abf4bd3

+ 14 - 9
ambari-common/src/main/python/resource_management/libraries/functions/hdp_select.py

@@ -105,18 +105,23 @@ HADOOP_DIR_DEFAULTS = {
   "lib": "/usr/lib/hadoop/lib"
 }
 
-def select_all(stack_version):
+def select_all(version_to_select):
   """
-  Executes hdp-select on every component for the latest installed version of the specified stack.
-  For example, if stack_version is "2.3", then this will find the latest installed version which
-  could be "2.3.0.0-9999".
-  :param stack_version: the stack version to use when calculating the latest actual version,
-  such as "2.3".
+  Executes hdp-select on every component for the specified version. If the value passed in is a
+  stack version such as "2.3", then this will find the latest installed version which
+  could be "2.3.0.0-9999". If a version is specified instead, such as 2.3.0.0-1234, it will use
+  that exact version.
+  :param version_to_select: the version to hdp-select on, such as "2.3" or "2.3.0.0-1234"
   """
-  Logger.info("Executing hdp-select set all on the latest calculated version for stack {0}".format(stack_version))
+  # it's an error, but it shouldn't really stop anything from working
+  if version_to_select is None:
+    Logger.error("Unable to execute hdp-select after installing because there was no version specified")
+    return
 
-  command = format('{sudo} /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^{stack_version} | tail -1`')
-  only_if_command = format('ls -d /usr/hdp/{stack_version}*')
+  Logger.info("Executing hdp-select set all on {0}".format(version_to_select))
+
+  command = format('{sudo} /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^{version_to_select} | tail -1`')
+  only_if_command = format('ls -d /usr/hdp/{version_to_select}*')
   Execute(command, only_if = only_if_command)
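
For illustration, a rough sketch (not part of this commit) of what the rewritten helper builds for the two input forms the new docstring describes. The resolved_command helper is a hypothetical name, and "ambari-sudo.sh" for {sudo} is an assumption taken from the expectation asserted in the test further down:

# A minimal sketch of how select_all()'s command string resolves for a stack
# version versus an exact version; not code from the commit.
def resolved_command(version_to_select, sudo="ambari-sudo.sh"):
    command = ("{sudo} /usr/bin/hdp-select set all "
               "`ambari-python-wrap /usr/bin/hdp-select versions "
               "| grep ^{v} | tail -1`").format(sudo=sudo, v=version_to_select)
    only_if = "ls -d /usr/hdp/{v}*".format(v=version_to_select)
    return command, only_if

# "2.3" (stack version) -> the wildcard grep picks the newest installed 2.3 build
# "2.3.0.0-1234" (exact version) -> only that build can match
print(resolved_command("2.3")[1])           # ls -d /usr/hdp/2.3*
print(resolved_command("2.3.0.0-1234")[1])  # ls -d /usr/hdp/2.3.0.0-1234*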
 
 

+ 2 - 2
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java

@@ -20,6 +20,7 @@ package org.apache.ambari.server.controller;
 
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
@@ -57,7 +58,6 @@ import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpInProgressEvent;
 import org.apache.ambari.server.utils.SecretReference;
-import org.apache.ambari.server.utils.StageUtils;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -499,7 +499,7 @@ public class AmbariActionExecutionHelper {
       }
     }
 
-    hostLevelParams.put("repo_info", rootJsonObject.toString());
+    hostLevelParams.put(REPO_INFO, rootJsonObject.toString());
 
     StackId stackId = cluster.getCurrentStackVersion();
     hostLevelParams.put(STACK_NAME, stackId.getStackName());

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py

@@ -25,9 +25,6 @@ from resource_management.libraries.functions import hdp_select
 from resource_management.libraries.functions import format_jvm_option
 from resource_management.libraries.functions.version import format_hdp_stack_version
 
-from resource_management.core.system import System
-from ambari_commons.os_check import OSCheck
-
 config = Script.get_config()
 
 dfs_type = default("/commandParams/dfs_type", "")
@@ -37,6 +34,9 @@ sudo = AMBARI_SUDO_BINARY
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
+# current host stack version
+current_version = default("/hostLevelParams/current_version", None)
+
 # default hadoop params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 hadoop_libexec_dir = hdp_select.get_hadoop_dir("libexec")
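
The new current_version line relies on the same path lookup the surrounding default() calls use. A minimal sketch of that behaviour, with an illustrative lookup function and command JSON shapes borrowed from the test fixture further down (not the library implementation):

# Illustrative sketch of the lookup that default("/hostLevelParams/current_version", None)
# performs on the command JSON; names and dicts here are examples only.
def lookup(config, path, default_value=None):
    node = config
    for key in path.strip("/").split("/"):
        if not isinstance(node, dict) or key not in node:
            return default_value
        node = node[key]
    return node

# Host re-added after an upgrade: the server includes the cluster's current version.
upgraded = {"hostLevelParams": {"stack_version": "2.3", "current_version": "2.3.0.0-1234"}}
# Fresh installation: no current_version yet, so the lookup returns the default (None).
fresh = {"hostLevelParams": {"stack_version": "2.3"}}

print(lookup(upgraded, "/hostLevelParams/current_version"))  # 2.3.0.0-1234
print(lookup(fresh, "/hostLevelParams/current_version"))     # None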

+ 4 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py

@@ -38,7 +38,10 @@ def setup_hdp_symlinks():
   """
   import params
   if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
-    hdp_select.select_all(params.stack_version_unformatted)
+    # try using the exact version first, falling back to just the stack if it's not defined,
+    # which would only be during an initial cluster installation
+    version = params.current_version if params.current_version is not None else params.stack_version_unformatted
+    hdp_select.select_all(version)
 
 
 def setup_config():
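
A small worked illustration of the fallback added to setup_hdp_symlinks() above; choose_version is a hypothetical name used only for this sketch:

# Sketch of the version choice: prefer the exact build the cluster is on, and fall
# back to the bare stack version only when no current version has been recorded yet.
def choose_version(current_version, stack_version_unformatted):
    return current_version if current_version is not None else stack_version_unformatted

print(choose_version("2.3.0.0-1234", "2.3"))  # host added post-upgrade -> exact build
print(choose_version(None, "2.3"))            # initial cluster install -> "2.3" wildcard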

+ 40 - 0
ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py

@@ -512,3 +512,43 @@ class TestHookAfterInstall(RMFTestCase):
         to = '/usr/hdp/current/hive-client/conf')
 
     self.assertNoMoreResources()
+
+
+  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1243"))
+  @patch("resource_management.libraries.functions.conf_select.create")
+  @patch("resource_management.libraries.functions.conf_select.select")
+  @patch("os.symlink")
+  @patch("shutil.rmtree")
+  def test_hook_default_hdp_select_specific_version(self, rmtree_mock, symlink_mock, conf_select_select_mock, conf_select_create_mock):
+    """
+    Tests that hdp-select set all uses a specific version, not a 2.3* wildcard, when
+    installing a component after the cluster version has already been set.
+
+    :param rmtree_mock:
+    :param symlink_mock:
+    :param conf_select_select_mock:
+    :param conf_select_create_mock:
+    :return:
+    """
+
+    def mocked_conf_select(arg1, arg2, arg3, dry_run = False):
+      return "/etc/{0}/{1}/0".format(arg2, arg3)
+
+    conf_select_create_mock.side_effect = mocked_conf_select
+
+    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
+    with open(config_file, "r") as f:
+      json_content = json.load(f)
+
+    version = '2.3.0.0-1234'
+    json_content['commandParams']['version'] = version
+    json_content['hostLevelParams']['stack_version'] = "2.3"
+    json_content['hostLevelParams']['current_version'] = "2.3.0.0-1234"
+
+    self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
+      classname="AfterInstallHook",
+      command="hook",
+      config_dict = json_content)
+
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^2.3.0.0-1234 | tail -1`',
+      only_if = 'ls -d /usr/hdp/2.3.0.0-1234*')