瀏覽代碼

Revert "AMBARI-9081. Rolling Upgrades: clients do not send information about their version on restart (alejandro)"

Reverting since this introduces a bunch of UT failures.

This reverts commit 89eeb617154f98c3bf264802b91ea568dd172c56.
Yusaku Sako 10 年之前
父節點
當前提交
992b0cb50b
共有 44 個文件被更改,包括 110 次插入和 116 次刪除
  1. 8 2
      ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
  2. 5 19
      ambari-agent/src/test/python/resource_management/TestScript.py
  3. 1 1
      ambari-common/src/main/python/resource_management/libraries/script/hook.py
  4. 32 63
      ambari-common/src/main/python/resource_management/libraries/script/script.py
  5. 3 1
      ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
  6. 2 0
      ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
  7. 2 0
      ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
  8. 2 0
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
  9. 2 0
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
  10. 2 0
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
  11. 2 0
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
  12. 1 4
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
  13. 2 0
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
  14. 2 0
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
  15. 2 0
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
  16. 3 4
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
  17. 1 3
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
  18. 2 0
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
  19. 2 0
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
  20. 2 0
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
  21. 2 0
      ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py
  22. 2 0
      ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
  23. 2 0
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
  24. 1 0
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
  25. 0 2
      ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
  26. 0 3
      ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
  27. 2 0
      ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
  28. 2 0
      ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py
  29. 2 0
      ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop_client.py
  30. 2 0
      ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/nimbus.py
  31. 2 0
      ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/rest_api.py
  32. 2 0
      ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/supervisor.py
  33. 0 2
      ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
  34. 0 3
      ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
  35. 2 0
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
  36. 1 0
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
  37. 0 3
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
  38. 2 0
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
  39. 2 0
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
  40. 0 3
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
  41. 2 0
      ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/zookeeper_client.py
  42. 2 0
      ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/zookeeper_server.py
  43. 1 3
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
  44. 1 0
      ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java

+ 8 - 2
ambari-agent/src/main/python/ambari_agent/PythonExecutor.py

@@ -71,9 +71,15 @@ class PythonExecutor:
     Timeout meaning: how many seconds should pass before script execution
     is forcibly terminated
     override_output_files option defines whether stdout/stderr files will be
-    recreated or appended.
-    The structured out file, however, is preserved during multiple invocations that use the same file.
+    recreated or appended
     """
+    # need to remove this file for the following case:
+    # status call 1 does not write to file; call 2 writes to file;
+    # call 3 does not write to file, so contents are still call 2's result
+    try:
+      os.unlink(tmpstructedoutfile)
+    except OSError:
+      pass # no error
 
     script_params += [tmpstructedoutfile, logger_level, tmp_dir]
     pythonCommand = self.python_command(script, script_params)

+ 5 - 19
ambari-agent/src/test/python/resource_management/TestScript.py

@@ -25,7 +25,6 @@ import pprint
 from unittest import TestCase
 import threading
 import tempfile
-import shutil
 import time
 from threading import Thread
 
@@ -44,11 +43,7 @@ class TestScript(TestCase):
     out = StringIO.StringIO()
     sys.stdout = out
 
-    # Temporary files and directories needed as args to Script(),
-    # which must be deleted in teardown
-    self.tmp_command_file = tempfile.NamedTemporaryFile()
-    self.tmp_structured_out_file = tempfile.NamedTemporaryFile()
-    self.tmp_data_dir = tempfile.mkdtemp()
+
 
   @patch("resource_management.core.providers.package.PackageProvider")
   def test_install_packages(self, package_provider_mock):
@@ -72,11 +67,9 @@ class TestScript(TestCase):
       }
     }
 
-    args = [str(os.getcwd()), "INSTALL",  str(self.tmp_command_file), str(os.getcwd()), str(self.tmp_structured_out_file), "INFO",  str(self.tmp_data_dir)]
-
     # Testing config without any keys
     with Environment(".", test_mode=True) as env:
-      script = Script(args)
+      script = Script()
       Script.config = no_packages_config
       script.install_packages(env)
     resource_dump = pprint.pformat(env.resource_list)
@@ -84,7 +77,7 @@ class TestScript(TestCase):
 
     # Testing empty package list
     with Environment(".", test_mode=True) as env:
-      script = Script(args)
+      script = Script()
       Script.config = empty_config
       script.install_packages(env)
     resource_dump = pprint.pformat(env.resource_list)
@@ -92,7 +85,6 @@ class TestScript(TestCase):
 
     # Testing installing of a list of packages
     with Environment(".", test_mode=True) as env:
-      script = Script(args)
       Script.config = dummy_config
       script.install_packages("env")
     resource_dump = pprint.pformat(env.resource_list)
@@ -100,10 +92,9 @@ class TestScript(TestCase):
 
   @patch("__builtin__.open")
   def test_structured_out(self, open_mock):
-    args = [str(os.getcwd()), "INSTALL",  str(self.tmp_command_file), str(os.getcwd()), str(self.tmp_structured_out_file), "INFO",  str(self.tmp_data_dir)]
-
-    script = Script(args)
+    script = Script()
     script.stroutfile = ''
+
     self.assertEqual(Script.structuredOut, {})
 
     script.put_structured_out({"1": "1"})
@@ -119,13 +110,8 @@ class TestScript(TestCase):
     self.assertEqual(open_mock.call_count, 3)
     self.assertEqual(Script.structuredOut, {"1": "3", "2": "2"})
 
-
   def tearDown(self):
     # enable stdout
     sys.stdout = sys.__stdout__
-    try:
-      shutil.rmtree(self.tmp_data_dir)
-    except:
-      pass
 
 

+ 1 - 1
ambari-common/src/main/python/resource_management/libraries/script/hook.py

@@ -26,7 +26,7 @@ import sys
 
 class Hook(Script):
   """
-  Executes a hook for a command for custom service. stdout and stderr are written to
+  Executes a hook for acommand for custom service. stdout and stderr are written to
   tmpoutfile and to tmperrfile respectively.
   """
 

+ 32 - 63
ambari-common/src/main/python/resource_management/libraries/script/script.py

@@ -85,42 +85,14 @@ class Script(object):
   4 path to file with structured command output (file will be created)
   """
   structuredOut = {}
-  
+  command_data_file = ""
+  basedir = ""
+  stroutfile = ""
+  logging_level = ""
+
   # Class variable
   tmp_dir = ""
-  
-  def __init__(self, argv=None):
-    """
-    Parses arguments and initializes variables and logging
-    """
-    # parse arguments
-    self.logger, self.chout, self.cherr = Logger.initialize_logger()
 
-    args = argv if argv else sys.argv
-    if len(args) < 7:
-      self.logger.error("Script expects at least 6 arguments, %d given" % len(args))
-      print USAGE.format(os.path.basename(args[0])) # print to stdout
-      sys.exit(1)
-
-    self.command_name = str.lower(args[1])
-    self.command_data_file = args[2]
-    self.basedir = args[3]
-    self.stroutfile = args[4]
-    self.load_structured_out()
-    self.logging_level = args[5]
-    Script.tmp_dir = args[6]
-
-    self.logging_level_str = logging._levelNames[self.logging_level]
-    self.chout.setLevel(self.logging_level_str)
-    self.logger.setLevel(self.logging_level_str)
-    
-  def load_structured_out(self):
-    Script.structuredOut = {}
-    if os.path.exists(self.stroutfile):
-      with open(self.stroutfile, 'r') as fp:
-        Script.structuredOut = json.load(fp)
-        self.logger.debug("Loaded structured out from file: %s" % str(self.stroutfile))
-  
   def get_stack_to_component(self):
     """
     To be overridden by subclasses.
@@ -129,15 +101,12 @@ class Script(object):
     return {}
 
   def put_structured_out(self, sout):
-    self.logger.debug("Adding content to structured out file: %s. New data: %s" % (str(self.stroutfile), str(sout)))
-    curr_content = Script.structuredOut.copy()
     Script.structuredOut.update(sout)
     try:
       with open(self.stroutfile, 'w') as fp:
         json.dump(Script.structuredOut, fp)
-    except IOError, err:
-      self.logger.error("Failed to write new content to structured out file: %s. Error: %s" % (str(self.stroutfile)), str(err))
-      Script.structuredOut = curr_content.copy()
+    except IOError:
+      Script.structuredOut.update({"errMsg" : "Unable to write to " + self.stroutfile})
 
   def save_component_version_to_structured_out(self, stack_name):
     """
@@ -158,8 +127,27 @@ class Script(object):
 
   def execute(self):
     """
-    Executes method relevant to command type
+    Sets up logging;
+    Parses command parameters and executes method relevant to command type
     """
+    logger, chout, cherr = Logger.initialize_logger()
+    
+    # parse arguments
+    if len(sys.argv) < 7:
+     logger.error("Script expects at least 6 arguments")
+     print USAGE.format(os.path.basename(sys.argv[0])) # print to stdout
+     sys.exit(1)
+
+    command_name = str.lower(sys.argv[1])
+    self.command_data_file = sys.argv[2]
+    self.basedir = sys.argv[3]
+    self.stroutfile = sys.argv[4]
+    self.logging_level = sys.argv[5]
+    Script.tmp_dir = sys.argv[6]
+
+    logging_level_str = logging._levelNames[self.logging_level]
+    chout.setLevel(logging_level_str)
+    logger.setLevel(logging_level_str)
 
     # on windows we need to reload some of env variables manually because there is no default paths for configs(like
     # /etc/something/conf on linux. When this env vars created by one of the Script execution, they can not be updated
@@ -174,34 +162,23 @@ class Script(object):
         #load passwords here(used on windows to impersonate different users)
         Script.passwords = {}
         for k, v in _PASSWORD_MAP.iteritems():
-          if get_path_form_configuration(k, Script.config) and get_path_form_configuration(v, Script.config):
-            Script.passwords[get_path_form_configuration(k, Script.config)] = get_path_form_configuration(v, Script.config)
+          if get_path_form_configuration(k,Script.config) and get_path_form_configuration(v,Script.config ):
+            Script.passwords[get_path_form_configuration(k,Script.config)] = get_path_form_configuration(v,Script.config)
 
     except IOError:
-      self.logger.exception("Can not read json file with command parameters: ")
+      logger.exception("Can not read json file with command parameters: ")
       sys.exit(1)
-
     # Run class method depending on a command type
     try:
-      method = self.choose_method_to_execute(self.command_name)
+      method = self.choose_method_to_execute(command_name)
       with Environment(self.basedir) as env:
         method(env)
-
-        # For start actions, try to advertise the component's version
-        if self.command_name == "start" or self.command_name == "install":
-          try:
-            import params
-            # This is to support older stacks
-            if hasattr(params, "stack_name"):
-              self.save_component_version_to_structured_out(params.stack_name)
-          except ImportError:
-            self.logger.error("Executing command %s could not import params" % str(self.command_name))
     except ClientComponentHasNoStatus or ComponentIsNotRunning:
       # Support of component status checks.
       # Non-zero exit code is interpreted as an INSTALLED status of a component
       sys.exit(1)
     except Fail:
-      self.logger.exception("Error while executing command '{0}':".format(self.command_name))
+      logger.exception("Error while executing command '{0}':".format(command_name))
       sys.exit(1)
 
 
@@ -356,14 +333,6 @@ class Script(object):
       if rolling_restart:
         self.post_rolling_restart(env)
 
-    try:
-      import params
-      if hasattr(params, "stack_name"):
-        self.save_component_version_to_structured_out(params.stack_name)
-    except ImportError:
-      self.logger.error("Restart command could not import params")
-
-
   def post_rolling_restart(self, env):
     """
     To be overridden by subclasses

+ 3 - 1
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py

@@ -36,6 +36,8 @@ class FalconClient(Script):
     env.set_params(params)
     falcon('client', action='config')
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def status(self, env):
     raise ClientComponentHasNoStatus()
 
@@ -49,7 +51,7 @@ class FalconClient(Script):
       return
 
     Logger.info("Executing Falcon Client Rolling Upgrade pre-restart")
-    Execute(format("hdp-select set falcon-client {version}"))
+    Execute(format("hdp-select set hadoop-client {version}"))
 
   def security_status(self, env):
     import status_params

+ 2 - 0
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py

@@ -46,6 +46,8 @@ class FalconServer(Script):
 
     falcon('server', action='start')
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
 
   def stop(self, env, rolling_restart=False):
     import params

+ 2 - 0
ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py

@@ -43,6 +43,8 @@ class FlumeHandler(Script):
 
     flume(action='start')
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def stop(self, env, rolling_restart=False):
     import params
 

+ 2 - 0
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py

@@ -46,6 +46,8 @@ class HbaseClient(Script):
     
     hbase(name='client')
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def status(self, env):
     raise ClientComponentHasNoStatus()
 

+ 2 - 0
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py

@@ -56,6 +56,8 @@ class HbaseMaster(Script):
     hbase_service( 'master',
       action = 'start'
     )
+
+    self.save_component_version_to_structured_out(params.stack_name)
     
   def stop(self, env, rolling_restart=False):
     import params

+ 2 - 0
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py

@@ -61,6 +61,8 @@ class HbaseRegionServer(Script):
       action = 'start'
     )
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)

+ 2 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py

@@ -62,6 +62,8 @@ class DataNode(Script):
     self.configure(env)
     datanode(action="start")
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
 
   def stop(self, env, rolling_restart=False):
     import params

+ 1 - 4
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py

@@ -26,10 +26,6 @@ from utils import service
 
 
 class HdfsClient(Script):
-
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-client"}
-
   def install(self, env):
     import params
 
@@ -59,6 +55,7 @@ class HdfsClient(Script):
   def config(self, env):
     import params
     hdfs()
+    pass
 
   def security_status(self, env):
     import status_params

+ 2 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py

@@ -64,6 +64,8 @@ class JournalNode(Script):
       create_log_dir=True
     )
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def stop(self, env, rolling_restart=False):
     import params
 

+ 2 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py

@@ -72,6 +72,8 @@ class NameNode(Script):
     self.configure(env)
     namenode(action="start", rolling_restart=rolling_restart, env=env)
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def post_rolling_restart(self, env):
     Logger.info("Executing Rolling Upgrade post-restart")
     import params

+ 2 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py

@@ -49,6 +49,8 @@ class SNameNode(Script):
     self.configure(env)
     snamenode(action="start")
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)

+ 3 - 4
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py

@@ -23,10 +23,6 @@ from resource_management import *
 from hcat import hcat
 
 class HCatClient(Script):
-
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-client"}
-
   def install(self, env):
     import params
     self.install_packages(env, exclude_packages=params.hive_exclude_packages)
@@ -34,9 +30,12 @@ class HCatClient(Script):
 
   def configure(self, env):
     import params
+
     env.set_params(params)
+
     hcat()
 
+
   def status(self, env):
     raise ClientComponentHasNoStatus()
 

+ 1 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py

@@ -24,9 +24,6 @@ from hive import hive
 
 class HiveClient(Script):
 
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-client"}
-
   def pre_rolling_restart(self, env):
     import params
     env.set_params(params)
@@ -45,6 +42,7 @@ class HiveClient(Script):
 
     hive(name='client')
 
+
   def status(self, env):
     raise ClientComponentHasNoStatus()
 

+ 2 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py

@@ -53,6 +53,8 @@ class HiveMetastore(Script):
     self.configure(env)  # FOR SECURITY
     hive_service('metastore', action = 'start')
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
 
   def stop(self, env, rolling_restart = False):
     import params

+ 2 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py

@@ -59,6 +59,8 @@ class HiveServer(Script):
     hive_service( 'hiveserver2', action = 'start',
       rolling_restart=rolling_restart )
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
 
   def stop(self, env, rolling_restart=False):
     import params

+ 2 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py

@@ -46,6 +46,8 @@ class WebHCatServer(Script):
     self.configure(env) # FOR SECURITY
     webhcat_service(action = 'start')
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
 
   def stop(self, env, rolling_restart=False):
     import params

+ 2 - 0
ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py

@@ -52,6 +52,8 @@ class KafkaBroker(Script):
             user=params.kafka_user,
             not_if=no_op_test
     )
+    
+    self.save_component_version_to_structured_out(params.stack_name)
 
   def stop(self, env, rolling_restart=False):
     import params

+ 2 - 0
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py

@@ -58,6 +58,8 @@ class KnoxGateway(Script):
             not_if=no_op_test
     )
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def stop(self, env):
     import params
     env.set_params(params)

+ 2 - 0
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py

@@ -41,6 +41,8 @@ class OozieClient(Script):
 
     oozie(is_server=False)
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def status(self, env):
     raise ClientComponentHasNoStatus()
 

+ 1 - 0
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py

@@ -61,6 +61,7 @@ class OozieServer(Script):
 
     oozie_service(action='start', rolling_restart=rolling_restart)
 
+    self.save_component_version_to_structured_out(params.stack_name)
     
   def stop(self, env, rolling_restart=False):
     import params

+ 0 - 2
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py

@@ -26,8 +26,6 @@ from resource_management import *
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-stack_name = default("/hostLevelParams/stack_name", None)
-
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 

+ 0 - 3
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py

@@ -26,9 +26,6 @@ from pig import pig
 
 class PigClient(Script):
 
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-client"}
-
   def pre_rolling_restart(self, env):
     import params
     env.set_params(params)

+ 2 - 0
ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py

@@ -46,6 +46,8 @@ class SliderClient(Script):
 
     slider()
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def status(self, env):
     raise ClientComponentHasNoStatus()
 

+ 2 - 0
ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py

@@ -40,5 +40,7 @@ class SqoopServiceCheck(Script):
             logoutput = True
     )
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
 if __name__ == "__main__":
   SqoopServiceCheck().execute()

+ 2 - 0
ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop_client.py

@@ -38,6 +38,8 @@ class SqoopClient(Script):
     env.set_params(params)
     sqoop(type='client')
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def status(self, env):
     raise ClientComponentHasNoStatus()
 

+ 2 - 0
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/nimbus.py

@@ -60,6 +60,8 @@ class Nimbus(Script):
 
     service("nimbus", action="start")
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)

+ 2 - 0
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/rest_api.py

@@ -59,6 +59,8 @@ class StormRestApi(Script):
 
     service("rest_api", action="start")
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)

+ 2 - 0
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/supervisor.py

@@ -58,6 +58,8 @@ class Supervisor(Script):
     service("supervisor", action="start")
     service("logviewer", action="start")
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)

+ 0 - 2
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py

@@ -24,8 +24,6 @@ from resource_management import *
 # server configurations
 config = Script.get_config()
 
-stack_name = default("/hostLevelParams/stack_name", None)
-
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)

+ 0 - 3
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py

@@ -25,9 +25,6 @@ from tez import tez
 
 class TezClient(Script):
 
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-client"}
-
   def pre_rolling_restart(self, env):
     import params
     env.set_params(params)

+ 2 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py

@@ -57,6 +57,8 @@ class ApplicationTimelineServer(Script):
     self.configure(env) # FOR SECURITY
     service('timelineserver', action='start')
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)

+ 1 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py

@@ -59,6 +59,7 @@ class HistoryServer(Script):
     copy_tarballs_to_hdfs('mapreduce', 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
     service('historyserver', action='start', serviceName='mapreduce')
 
+    self.save_component_version_to_structured_out(params.stack_name)
 
   def stop(self, env, rolling_restart=False):
     import params

+ 0 - 3
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py

@@ -26,9 +26,6 @@ from yarn import yarn
 
 class MapReduce2Client(Script):
 
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-client"}
-
   def pre_rolling_restart(self, env):
     import params
     env.set_params(params)

+ 2 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py

@@ -58,6 +58,8 @@ class Nodemanager(Script):
     self.configure(env) # FOR SECURITY
     service('nodemanager',action='start')
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def post_rolling_restart(self, env):
     Logger.info("Executing NodeManager Rolling Upgrade post-restart")
     import params

+ 2 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py

@@ -60,6 +60,8 @@ class Resourcemanager(Script):
             action='start'
     )
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def stop(self, env, rolling_restart=False):
     import params
 

+ 0 - 3
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py

@@ -26,9 +26,6 @@ from yarn import yarn
 
 class YarnClient(Script):
 
-  def get_stack_to_component(self):
-    return {"HDP": "hadoop-client"}
-
   def pre_rolling_restart(self, env):
     import params
     env.set_params(params)

+ 2 - 0
ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/zookeeper_client.py

@@ -39,6 +39,8 @@ class ZookeeperClient(Script):
 
     zookeeper(type='client')
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def status(self, env):
     raise ClientComponentHasNoStatus()
 

+ 2 - 0
ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/zookeeper_server.py

@@ -77,6 +77,8 @@ class ZookeeperServer(Script):
     self.configure(env)
     zookeeper_service(action = 'start')
 
+    self.save_component_version_to_structured_out(params.stack_name)
+
   def post_rolling_restart(self, env):
     Logger.info("Executing Rolling Upgrade post-restart")
     import params

+ 1 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py

@@ -31,7 +31,5 @@ class AfterInstallHook(Hook):
     setup_hdp_install_directory()
     setup_config()
 
-
 if __name__ == "__main__":
-  h = AfterInstallHook()
-  h.execute()
+  AfterInstallHook().execute()

+ 1 - 0
ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java

@@ -893,6 +893,7 @@ public class ClusterTest {
     checkStackVersionState(stack, version, RepositoryVersionState.INSTALLED);
 
     assertStateException(stack, version, RepositoryVersionState.CURRENT, RepositoryVersionState.INSTALLED);
+    assertStateException(stack, version, RepositoryVersionState.UPGRADED, RepositoryVersionState.INSTALLED);
     assertStateException(stack, version, RepositoryVersionState.UPGRADE_FAILED, RepositoryVersionState.INSTALLED);
     assertStateException(stack, version, RepositoryVersionState.INSTALL_FAILED, RepositoryVersionState.INSTALLED);