Browse Source

AMBARI-15609: Refactor get_stack_to_component() method (Juanjo Marron via jluniya)

Jayush Luniya 9 years ago
parent
commit
9c53dffb95
56 changed files with 123 additions and 185 deletions
  1. 6 16
      ambari-common/src/main/python/resource_management/libraries/script/script.py
  2. 2 4
      ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
  3. 2 3
      ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
  4. 2 3
      ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
  5. 2 3
      ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
  6. 2 3
      ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
  7. 2 3
      ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
  8. 2 2
      ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
  9. 2 3
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
  10. 2 3
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
  11. 2 3
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
  12. 2 3
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
  13. 6 9
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
  14. 2 3
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
  15. 2 3
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
  16. 5 9
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
  17. 2 3
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
  18. 2 3
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
  19. 2 3
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
  20. 2 3
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
  21. 2 3
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
  22. 2 3
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
  23. 2 3
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
  24. 2 3
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
  25. 2 2
      ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py
  26. 2 3
      ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
  27. 2 2
      ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
  28. 2 3
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
  29. 2 3
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
  30. 2 2
      ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
  31. 2 3
      ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
  32. 2 3
      ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py
  33. 2 3
      ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_usersync.py
  34. 2 2
      ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
  35. 2 2
      ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
  36. 2 3
      ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
  37. 2 3
      ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_client.py
  38. 2 3
      ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_thrift_server.py
  39. 2 2
      ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py
  40. 2 2
      ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop_client.py
  41. 2 3
      ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/drpc_server.py
  42. 2 3
      ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/nimbus.py
  43. 2 3
      ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/nimbus_prod.py
  44. 2 3
      ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/rest_api.py
  45. 2 3
      ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/supervisor.py
  46. 2 3
      ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/supervisor_prod.py
  47. 2 3
      ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/ui_server.py
  48. 2 2
      ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
  49. 2 3
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
  50. 2 3
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
  51. 2 3
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
  52. 2 3
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
  53. 2 3
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
  54. 2 3
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
  55. 2 3
      ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/zookeeper_client.py
  56. 2 3
      ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/zookeeper_server.py

+ 6 - 16
ambari-common/src/main/python/resource_management/libraries/script/script.py

@@ -110,14 +110,7 @@ class Script(object):
 
   # Class variable
   tmp_dir = ""
-
-  def get_stack_to_component(self):
-    """
-    To be overridden by subclasses.
-    Returns a dictionary where the key is a stack name, and the value is the component name used in selecting the version.
-    """
-    return {}
-    
+
   def load_structured_out(self):
     Script.structuredOut = {}
     if os.path.exists(self.stroutfile):
@@ -148,14 +141,11 @@ class Script(object):
       Script.structuredOut.update({"errMsg" : "Unable to write to " + self.stroutfile})
       
   def get_component_name(self):
-    stack_name = Script.get_stack_name()
-    stack_to_component = self.get_stack_to_component()
-    
-    if stack_to_component and stack_name:
-      component_name = stack_to_component[stack_name] if stack_name in stack_to_component else None
-      return component_name
-    
-    return None
+    """
+    To be overridden by subclasses.
+    Returns a string with the component name used in selecting the version.
+    """
+    pass
 
   def save_component_version_to_structured_out(self):
     """

+ 2 - 4
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py

@@ -30,10 +30,8 @@ from accumulo_configuration import setup_conf_dir
 
 
 class AccumuloClient(Script):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "accumulo-client"}
-
+  def get_component_name(self):
+    return "accumulo-client"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 3
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py

@@ -51,18 +51,17 @@ class AccumuloScript(Script):
     self.component = component
 
 
-  def get_stack_to_component(self):
+  def get_component_name(self):
     """
     Gets the <stack-selector-tool> component name given the script component
     :return:  the name of the component on the stack which is used by
               <stack-selector-tool>
     """
-    import status_params
     if self.component not in self.COMPONENT_TO_STACK_SELECT_MAPPING:
       return None
 
     stack_component = self.COMPONENT_TO_STACK_SELECT_MAPPING[self.component]
-    return {status_params.stack_name: stack_component}
+    return stack_component
 
 
   def install(self, env):

+ 2 - 3
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py

@@ -30,9 +30,8 @@ from metadata import metadata
 # todo: support rolling upgrade
 class AtlasClient(Script):
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "atlas-client"}
+  def get_component_name(self):
+    return "atlas-client"
 
   # ToDo: currently <stack-selector-tool> doesn't contain atlas-client, uncomment this block when
   # ToDo: atlas-client will be available

+ 2 - 3
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py

@@ -30,9 +30,8 @@ from resource_management.libraries.functions import StackFeature
 
 class MetadataServer(Script):
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "atlas-server"}
+  def get_component_name(self):
+    return "atlas-server"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 3
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py

@@ -37,9 +37,8 @@ class FalconClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class FalconClientLinux(FalconClient):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "falcon-client"}
+  def get_component_name(self):
+    return "falcon-client"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 3
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py

@@ -59,9 +59,8 @@ class FalconServer(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class FalconServerLinux(FalconServer):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "falcon-server"}
+  def get_component_name(self):
+    return "falcon-server"
 
   def install(self, env):
     import params

+ 2 - 2
ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py

@@ -43,8 +43,8 @@ class FlumeHandler(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class FlumeHandlerLinux(FlumeHandler):
-  def get_stack_to_component(self):
-    return {default("/hostLevelParams/stack_name", None): "flume-server"}
+  def get_component_name(self):
+    return "flume-server"
 
   def install(self, env):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py

@@ -52,9 +52,8 @@ class HbaseClientWindows(HbaseClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseClientDefault(HbaseClient):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "hbase-client"}
+  def get_component_name(self):
+    return "hbase-client"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py

@@ -70,9 +70,8 @@ class HbaseMasterWindows(HbaseMaster):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseMasterDefault(HbaseMaster):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "hbase-master"}
+  def get_component_name(self):
+    return "hbase-master"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py

@@ -68,9 +68,8 @@ class HbaseRegionServerWindows(HbaseRegionServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseRegionServerDefault(HbaseRegionServer):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "hbase-regionserver"}
+  def get_component_name(self):
+    return "hbase-regionserver"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py

@@ -34,9 +34,8 @@ class PhoenixQueryServer(Script):
     self.install_packages(env)
 
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "phoenix-server"}
+  def get_component_name(self):
+    return "phoenix-server"
 
 
   def configure(self, env):

+ 6 - 9
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py

@@ -32,19 +32,16 @@ from utils import get_hdfs_binary
 
 class DataNode(Script):
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name : "hadoop-hdfs-datanode"}
+  def get_component_name(self):
+    return "hadoop-hdfs-datanode"
 
   def get_hdfs_binary(self):
     """
-    Get the name or path to the hdfs binary depending on the stack and version.
+    Get the name or path to the hdfs binary depending on the component name.
     """
-    import status_params
-    stack_to_comp = self.get_stack_to_component()
-    if status_params.stack_name in stack_to_comp:
-      return get_hdfs_binary(stack_to_comp[status_params.stack_name])
-    return "hdfs"
+    component_name = self.get_component_name()
+    return get_hdfs_binary(component_name)
+
 
   def install(self, env):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py

@@ -56,9 +56,8 @@ class HdfsClient(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HdfsClientDefault(HdfsClient):
 
-  def get_stack_to_component(self):
-    import status_params
-    return { status_params.stack_name : "hadoop-client"}
+  def get_component_name(self):
+    return "hadoop-client"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py

@@ -42,9 +42,8 @@ class JournalNode(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class JournalNodeDefault(JournalNode):
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name : "hadoop-hdfs-journalnode"}
+  def get_component_name(self):
+    return "hadoop-hdfs-journalnode"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")

+ 5 - 9
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py

@@ -68,19 +68,15 @@ except ImportError:
 
 class NameNode(Script):
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name : "hadoop-hdfs-namenode"}
+  def get_component_name(self):
+    return "hadoop-hdfs-namenode"
 
   def get_hdfs_binary(self):
     """
-    Get the name or path to the hdfs binary depending on the stack and version.
+    Get the name or path to the hdfs binary depending on the component name.
     """
-    import params
-    stack_to_comp = self.get_stack_to_component()
-    if params.stack_name in stack_to_comp:
-      return get_hdfs_binary(stack_to_comp[params.stack_name])
-    return "hdfs"
+    component_name = self.get_component_name()
+    return get_hdfs_binary(component_name)
 
   def install(self, env):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py

@@ -32,9 +32,8 @@ from resource_management.libraries.functions.stack_features import check_stack_f
 
 class NFSGateway(Script):
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name : "hadoop-hdfs-nfs3"}
+  def get_component_name(self):
+    return "hadoop-hdfs-nfs3"
 
   def install(self, env):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py

@@ -64,9 +64,8 @@ class SNameNode(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class SNameNodeDefault(SNameNode):
 
-  def get_stack_to_component(self):
-    import status_params
-    return { status_params.stack_name : "hadoop-hdfs-secondarynamenode"}
+  def get_component_name(self):
+    return "hadoop-hdfs-secondarynamenode"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")

+ 2 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py

@@ -51,12 +51,11 @@ class HCatClientWindows(HCatClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HCatClientDefault(HCatClient):
-  def get_stack_to_component(self):
+  def get_component_name(self):
     # HCat client doesn't have a first-class entry in <stack-selector-tool>. Since clients always
     # update after daemons, this ensures that the hcat directories are correct on hosts
     # which do not include the WebHCat daemon
-    import status_params
-    return {status_params.stack_name: "hive-webhcat"}
+    return "hive-webhcat"
 
 
   def pre_upgrade_restart(self, env, upgrade_type=None):

+ 2 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py

@@ -49,9 +49,8 @@ class HiveClientWindows(HiveClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveClientDefault(HiveClient):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "hadoop-client"}
+  def get_component_name(self):
+    return "hadoop-client"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Hive client Stack Upgrade pre-restart")

+ 2 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py

@@ -83,9 +83,8 @@ class HiveMetastoreWindows(HiveMetastore):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveMetastoreDefault(HiveMetastore):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "hive-metastore"}
+  def get_component_name(self):
+    return "hive-metastore"
 
 
   def status(self, env):

+ 2 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py

@@ -76,9 +76,8 @@ class HiveServerWindows(HiveServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveServerDefault(HiveServer):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "hive-server2"}
+  def get_component_name(self):
+    return "hive-server2"
 
   def start(self, env, upgrade_type=None):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py

@@ -62,9 +62,8 @@ class HiveServerInteractive(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveServerInteractiveDefault(HiveServerInteractive):
 
-    def get_stack_to_component(self):
-      import status_params
-      return {status_params.stack_name: "hive-server2-hive2"}
+    def get_component_name(self):
+      return "hive-server2-hive2"
 
     def install(self, env):
       import params

+ 2 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py

@@ -64,9 +64,8 @@ class WebHCatServerWindows(WebHCatServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class WebHCatServerDefault(WebHCatServer):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "hive-webhcat"}
+  def get_component_name(self):
+    return "hive-webhcat"
 
   def status(self, env):
     import status_params

+ 2 - 2
ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py

@@ -37,8 +37,8 @@ from setup_ranger_kafka import setup_ranger_kafka
 
 class KafkaBroker(Script):
 
-  def get_stack_to_component(self):
-    return {default("/hostLevelParams/stack_name", None) : "kafka-broker"}
+  def get_component_name(self):
+    return "kafka-broker"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 3
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py

@@ -51,9 +51,8 @@ from resource_management.libraries.functions import StackFeature
 
 
 class KnoxGateway(Script):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "knox-server"}
+  def get_component_name(self):
+    return "knox-server"
 
   def install(self, env):
     import params

+ 2 - 2
ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py

@@ -29,8 +29,8 @@ from resource_management.libraries.functions.default import default
 
 class MahoutClient(Script):
 
-  def get_stack_to_component(self):
-    return {default("/hostLevelParams/stack_name", None): "mahout-client"}
+  def get_component_name(self):
+    return "mahout-client"
 
 
   def pre_upgrade_restart(self, env, upgrade_type=None):

+ 2 - 3
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py

@@ -31,9 +31,8 @@ from oozie_service import oozie_service
 
 class OozieClient(Script):
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "oozie-client"}
+  def get_component_name(self):
+    return "oozie-client"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 3
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py

@@ -46,9 +46,8 @@ from check_oozie_server_status import check_oozie_server_status
 
 class OozieServer(Script):
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "oozie-server"}
+  def get_component_name(self):
+    return "oozie-server"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 2
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py

@@ -42,8 +42,8 @@ class PigClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class PigClientLinux(PigClient):
-  def get_stack_to_component(self):
-    return {default("/hostLevelParams/stack_name", None): "hadoop-client"}
+  def get_component_name(self):
+    return "hadoop-client"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py

@@ -30,9 +30,8 @@ import os, errno
 
 class RangerAdmin(Script):
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "ranger-admin"}
+  def get_component_name(self):
+    return "ranger-admin"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 3
ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py

@@ -72,9 +72,8 @@ class RangerTagsync(Script):
       conf_select.select(params.stack_name, "ranger-tagsync", params.version)
       stack_select.select("ranger-tagsync", params.version)
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "ranger-tagsync"}
+  def get_component_name(self):
+    return "ranger-tagsync"
 
 
 if __name__ == "__main__":

+ 2 - 3
ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_usersync.py

@@ -70,9 +70,8 @@ class RangerUsersync(Script):
     env.set_params(params)
     upgrade.prestart(env, "ranger-usersync")
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "ranger-usersync"}
+  def get_component_name(self):
+    return "ranger-usersync"
 
 
 if __name__ == "__main__":

+ 2 - 2
ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py

@@ -31,8 +31,8 @@ import upgrade
 
 class KmsServer(Script):
 
-  def get_stack_to_component(self):
-    return {default("/hostLevelParams/stack_name", None): "ranger-kms"}
+  def get_component_name(self):
+    return "ranger-kms"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 2
ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py

@@ -34,8 +34,8 @@ class SliderClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class SliderClientLinux(SliderClient):
-  def get_stack_to_component(self):
-    return {default("/hostLevelParams/stack_name", None): "slider-client"}
+  def get_component_name(self):
+    return "slider-client"
 
   def pre_upgrade_restart(self, env,  upgrade_type=None):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py

@@ -68,9 +68,8 @@ class JobHistoryServer(Script):
     check_process_status(status_params.spark_history_server_pid_file)
     
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name : "spark-historyserver"}
+  def get_component_name(self):
+    return "spark-historyserver"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_client.py

@@ -44,9 +44,8 @@ class SparkClient(Script):
   def status(self, env):
     raise ClientComponentHasNoStatus()
   
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name : "spark-client"}
+  def get_component_name(self):
+    return "spark-client"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_thrift_server.py

@@ -63,9 +63,8 @@ class SparkThriftServer(Script):
     env.set_params(status_params)
     check_process_status(status_params.spark_thrift_server_pid_file)
 
-  def get_stack_to_component(self):
-    import status_params
-    return { status_params.stack_name : "spark-thriftserver"}
+  def get_component_name(self):
+    return "spark-thriftserver"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params

+ 2 - 2
ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py

@@ -33,8 +33,8 @@ class SqoopServiceCheck(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class SqoopServiceCheckDefault(SqoopServiceCheck):
 
-  def get_stack_to_component(self):
-    return {default("/hostLevelParams/stack_name", None): "sqoop-server"}
+  def get_component_name(self):
+    return "sqoop-server"
 
   def service_check(self, env):
     import params

+ 2 - 2
ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop_client.py

@@ -46,8 +46,8 @@ class SqoopClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class SqoopClientDefault(SqoopClient):
-  def get_stack_to_component(self):
-    return {default("/hostLevelParams/stack_name", None): "sqoop-client"}
+  def get_component_name(self):
+    return "sqoop-client"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/drpc_server.py

@@ -36,9 +36,8 @@ from resource_management.libraries.functions.security_commons import build_expec
 
 class DrpcServer(Script):
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "storm-client"}
+  def get_component_name(self):
+    return "storm-client"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 3
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/nimbus.py

@@ -38,9 +38,8 @@ from ambari_commons.os_family_impl import OsFamilyImpl
 from resource_management.core.resources.service import Service
 
 class Nimbus(Script):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name : "storm-nimbus"}
+  def get_component_name(self):
+    return "storm-nimbus"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 3
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/nimbus_prod.py

@@ -31,9 +31,8 @@ from resource_management.libraries.functions import StackFeature
 
 class Nimbus(Script):
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name : "storm-nimbus"}
+  def get_component_name(self):
+    return "storm-nimbus"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 3
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/rest_api.py

@@ -38,9 +38,8 @@ class StormRestApi(Script):
   In HDP 2.2, it was removed since the functionality was moved to Storm UI Server.
   """
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name : "storm-client"}
+  def get_component_name(self):
+    return "storm-client"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 3
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/supervisor.py

@@ -35,9 +35,8 @@ from resource_management.core.resources.service import Service
 
 
 class Supervisor(Script):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name : "storm-supervisor"}
+  def get_component_name(self):
+    return "storm-supervisor"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 3
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/supervisor_prod.py

@@ -33,9 +33,8 @@ from resource_management.libraries.functions import StackFeature
 
 class Supervisor(Script):
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name : "storm-supervisor"}
+  def get_component_name(self):
+    return "storm-supervisor"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 3
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/ui_server.py

@@ -42,9 +42,8 @@ from resource_management.core.resources.service import Service
 
 class UiServer(Script):
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name : "storm-client"}
+  def get_component_name(self):
+    return "storm-client"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 2
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py

@@ -50,8 +50,8 @@ class TezClient(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class TezClientLinux(TezClient):
 
-  def get_stack_to_component(self):
-    return {default("/hostLevelParams/stack_name", None) : "hadoop-client"}
+  def get_component_name(self):
+    return "hadoop-client"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py

@@ -63,9 +63,8 @@ class ApplicationTimelineServerWindows(ApplicationTimelineServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class ApplicationTimelineServerDefault(ApplicationTimelineServer):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "hadoop-yarn-timelineserver"}
+  def get_component_name(self):
+    return "hadoop-yarn-timelineserver"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")

+ 2 - 3
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py

@@ -70,9 +70,8 @@ class HistoryserverWindows(HistoryServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HistoryServerDefault(HistoryServer):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "hadoop-mapreduce-historyserver"}
+  def get_component_name(self):
+    return "hadoop-mapreduce-historyserver"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")

+ 2 - 3
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py

@@ -51,9 +51,8 @@ class MapReduce2ClientWindows(MapReduce2Client):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class MapReduce2ClientDefault(MapReduce2Client):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "hadoop-client"}
+  def get_component_name(self):
+    return "hadoop-client"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py

@@ -65,9 +65,8 @@ class NodemanagerWindows(Nodemanager):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class NodemanagerDefault(Nodemanager):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "hadoop-yarn-nodemanager"}
+  def get_component_name(self):
+    return "hadoop-yarn-nodemanager"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing NodeManager Stack Upgrade pre-restart")

+ 2 - 3
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py

@@ -98,9 +98,8 @@ class ResourcemanagerWindows(Resourcemanager):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class ResourcemanagerDefault(Resourcemanager):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "hadoop-yarn-resourcemanager"}
+  def get_component_name(self):
+    return "hadoop-yarn-resourcemanager"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade post-restart")

+ 2 - 3
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py

@@ -51,9 +51,8 @@ class YarnClientWindows(YarnClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class YarnClientDefault(YarnClient):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name: "hadoop-client"}
+  def get_component_name(self):
+    return "hadoop-client"
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params

+ 2 - 3
ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/zookeeper_client.py

@@ -55,9 +55,8 @@ class ZookeeperClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class ZookeeperClientLinux(ZookeeperClient):
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name : "zookeeper-client"}
+  def get_component_name(self):
+    return "zookeeper-client"
 
   def install(self, env):
     self.install_packages(env)

+ 2 - 3
ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/zookeeper_server.py

@@ -64,9 +64,8 @@ class ZookeeperServer(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class ZookeeperServerLinux(ZookeeperServer):
 
-  def get_stack_to_component(self):
-    import status_params
-    return {status_params.stack_name : "zookeeper-server"}
+  def get_component_name(self):
+    return "zookeeper-server"
 
   def install(self, env):
     self.install_packages(env)