
AMBARI-15329: Code Cleanup: Remove hdp hardcodings in functions, variables etc. (jluniya)

Jayush Luniya 9 years ago
parent commit f7221e5a60
100 changed files with 483 additions and 483 deletions
  1. + 2 - 2    ambari-agent/src/main/python/ambari_agent/HostCheckReportFileHandler.py
  2. + 2 - 2    ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
  3. + 13 - 13  ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
  4. + 11 - 11  ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
  5. + 16 - 16  ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
  6. + 3 - 3    ambari-common/src/main/python/resource_management/libraries/functions/get_lzo_packages.py
  7. + 20 - 20  ambari-common/src/main/python/resource_management/libraries/functions/get_stack_version.py
  8. + 5 - 5    ambari-common/src/main/python/resource_management/libraries/functions/install_windows_msi.py
  9. + 4 - 4    ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin.py
  10. + 14 - 14 ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py
  11. + 28 - 28 ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
  12. + 4 - 4   ambari-common/src/main/python/resource_management/libraries/functions/version.py
  13. + 5 - 5   ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py
  14. + 25 - 25 ambari-common/src/main/python/resource_management/libraries/script/script.py
  15. + 3 - 3   ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
  16. + 11 - 11 ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
  17. + 6 - 6   ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
  18. + 3 - 3   ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
  19. + 4 - 4   ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
  20. + 2 - 2   ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
  21. + 3 - 3   ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
  22. + 3 - 3   ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
  23. + 6 - 6   ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
  24. + 1 - 1   ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py
  25. + 3 - 3   ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
  26. + 3 - 3   ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
  27. + 1 - 1   ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_linux.py
  28. + 1 - 1   ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py
  29. + 5 - 5   ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
  30. + 5 - 5   ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
  31. + 3 - 3   ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
  32. + 1 - 1   ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py
  33. + 4 - 4   ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
  34. + 4 - 4   ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
  35. + 3 - 3   ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
  36. + 4 - 4   ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
  37. + 4 - 4   ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
  38. + 4 - 4   ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
  39. + 10 - 10 ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
  40. + 4 - 4   ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
  41. + 3 - 3   ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
  42. + 2 - 2   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
  43. + 2 - 2   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
  44. + 3 - 3   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
  45. + 6 - 6   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
  46. + 5 - 5   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
  47. + 3 - 3   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
  48. + 8 - 8   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
  49. + 8 - 8   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
  50. + 1 - 1   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
  51. + 4 - 4   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
  52. + 1 - 1   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
  53. + 3 - 3   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
  54. + 2 - 2   ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka.py
  55. + 9 - 9   ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py
  56. + 8 - 8   ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
  57. + 2 - 2   ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox.py
  58. + 5 - 5   ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
  59. + 9 - 9   ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
  60. + 1 - 1   ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/status_params.py
  61. + 2 - 2   ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/upgrade.py
  62. + 2 - 2   ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
  63. + 5 - 5   ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
  64. + 2 - 2   ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
  65. + 3 - 3   ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
  66. + 9 - 9   ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
  67. + 6 - 6   ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
  68. + 9 - 9   ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
  69. + 1 - 1   ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/status_params.py
  70. + 6 - 6   ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
  71. + 3 - 3   ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
  72. + 1 - 1   ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
  73. + 5 - 5   ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
  74. + 3 - 3   ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
  75. + 2 - 2   ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
  76. + 3 - 3   ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
  77. + 2 - 2   ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
  78. + 2 - 2   ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
  79. + 3 - 3   ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
  80. + 1 - 1   ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
  81. + 1 - 1   ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider.py
  82. + 4 - 4   ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
  83. + 5 - 5   ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
  84. + 9 - 9   ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
  85. + 3 - 3   ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
  86. + 4 - 4   ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_client.py
  87. + 4 - 4   ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
  88. + 4 - 4   ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_thrift_server.py
  89. + 3 - 3   ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
  90. + 4 - 4   ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop_client.py
  91. + 4 - 4   ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/drpc_server.py
  92. + 5 - 5   ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/nimbus.py
  93. + 5 - 5   ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/nimbus_prod.py
  94. + 5 - 5   ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
  95. + 1 - 1   ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_windows.py
  96. + 2 - 2   ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/rest_api.py
  97. + 1 - 1   ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/status_params.py
  98. + 1 - 1   ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/storm.py
  99. + 5 - 5   ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/supervisor.py
  100. + 5 - 5  ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/supervisor_prod.py
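
For reference, the rename pattern applied throughout this commit can be summarized in a short sketch (illustrative only, not part of the diff; the imports follow the renamed modules shown below, while the component name and version literals are hypothetical):

    from resource_management.libraries.functions.get_stack_version import get_stack_version   # was get_hdp_version
    from resource_management.libraries.functions.version import compare_versions, format_stack_version  # was format_hdp_stack_version

    # Resolve the component's stack version via hdp-select (e.g. "2.3.0.0-2557", or None if unavailable).
    stack_version = get_stack_version("hadoop-client")

    # Version guard, mirroring the pattern in get_lzo_packages.py; "2.3" is a hypothetical unformatted version.
    stack_version_formatted = format_stack_version("2.3")
    if stack_version_formatted != "" and compare_versions(stack_version_formatted, "2.2") >= 0:
        print("stack is 2.2 or newer")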

+ 2 - 2
ambari-agent/src/main/python/ambari_agent/HostCheckReportFileHandler.py

@@ -87,7 +87,7 @@ class HostCheckReportFileHandler:
       logger.error("Can't write host check file at %s :%s " % (self.hostCheckCustomActionsFilePath, err.message))
       traceback.print_exc()
 
-  def _hdp_list_directory(self):
+  def _stack_list_directory(self):
     """
     Return filtered list of /usr/hdp directory allowed to be removed
     :rtype list
@@ -152,7 +152,7 @@ class HostCheckReportFileHandler:
         items = []
         for itemDetail in hostInfo['stackFoldersAndFiles']:
           items.append(itemDetail['name'])
-        items += self._hdp_list_directory()
+        items += self._stack_list_directory()
         config.add_section('directories')
         config.set('directories', 'dir_list', ','.join(items))
 

+ 2 - 2
ambari-common/src/main/python/resource_management/libraries/functions/__init__.py

@@ -38,7 +38,7 @@ from resource_management.libraries.functions.hive_check import *
 from resource_management.libraries.functions.version import *
 from resource_management.libraries.functions.format_jvm_option import *
 from resource_management.libraries.functions.constants import *
-from resource_management.libraries.functions.get_hdp_version import *
+from resource_management.libraries.functions.get_stack_version import *
 from resource_management.libraries.functions.get_lzo_packages import *
 from resource_management.libraries.functions.setup_ranger_plugin import *
 from resource_management.libraries.functions.curl_krb_request import *
@@ -47,6 +47,6 @@ IS_WINDOWS = platform.system() == "Windows"
 
 if IS_WINDOWS:
   from resource_management.libraries.functions.windows_service_utils import *
-  from resource_management.libraries.functions.install_hdp_msi import *
+  from resource_management.libraries.functions.install_windows_msi import *
   from resource_management.libraries.functions.install_jdbc_driver import *
   from resource_management.libraries.functions.reload_windows_env import *

+ 13 - 13
ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py

@@ -22,7 +22,7 @@ __all__ = ["select", "create", "get_hadoop_conf_dir", "get_hadoop_dir"]
 
 import os
 import version
-import hdp_select
+import stack_select
 import subprocess
 
 from resource_management.core import shell
@@ -34,7 +34,7 @@ from resource_management.core.resources.system import Execute
 from resource_management.core.resources.system import Link
 from resource_management.libraries.functions.default import default
 from resource_management.core.exceptions import Fail
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.core.shell import as_sudo
 
 
@@ -192,7 +192,7 @@ def _valid(stack_name, package, ver):
   if stack_name != "HDP":
     return False
 
-  if version.compare_versions(version.format_hdp_stack_version(ver), "2.3.0.0") < 0:
+  if version.compare_versions(version.format_stack_version(ver), "2.3.0.0") < 0:
     return False
 
   return True
@@ -298,10 +298,10 @@ def get_hadoop_conf_dir(force_latest_on_upgrade=False):
 
   if not Script.in_stack_upgrade():
     # During normal operation, the HDP stack must be 2.3 or higher
-    if Script.is_hdp_stack_greater_or_equal("2.2"):
+    if Script.is_stack_greater_or_equal("2.2"):
       hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
 
-    if Script.is_hdp_stack_greater_or_equal("2.3"):
+    if Script.is_stack_greater_or_equal("2.3"):
       hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
       stack_name = default("/hostLevelParams/stack_name", None)
       version = default("/commandParams/version", None)
@@ -326,16 +326,16 @@ def get_hadoop_conf_dir(force_latest_on_upgrade=False):
     EU/RU | 2.3    | 2.3.*  | Any                   | Use /usr/hdp/$version/hadoop/conf, which should be a symlink destination
     '''
 
-    # The method "is_hdp_stack_greater_or_equal" uses "stack_version" which is the desired stack, e.g., 2.2 or 2.3
+    # The method "is_stack_greater_or_equal" uses "stack_version" which is the desired stack, e.g., 2.2 or 2.3
     # In an RU, it is always the desired stack, and doesn't change even during the Downgrade!
     # In an RU Downgrade from HDP 2.3 to 2.2, the first thing we do is 
     # rm /etc/[component]/conf and then mv /etc/[component]/conf.backup /etc/[component]/conf
-    if Script.is_hdp_stack_greater_or_equal("2.2"):
+    if Script.is_stack_greater_or_equal("2.2"):
       hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
 
       # This contains the "version", including the build number, that is actually used during a stack upgrade and
       # is the version upgrading/downgrading to.
-      stack_info = hdp_select._get_upgrade_stack()
+      stack_info = stack_select._get_upgrade_stack()
 
       if stack_info is not None:
         stack_name = stack_info[0]
@@ -345,14 +345,14 @@ def get_hadoop_conf_dir(force_latest_on_upgrade=False):
       
       Logger.info("In the middle of a stack upgrade/downgrade for Stack {0} and destination version {1}, determining which hadoop conf dir to use.".format(stack_name, version))
       # This is the version either upgrading or downgrading to.
-      if compare_versions(format_hdp_stack_version(version), "2.3.0.0") >= 0:
+      if compare_versions(format_stack_version(version), "2.3.0.0") >= 0:
         # Determine if hdp-select has been run and if not, then use the current
         # hdp version until this component is upgraded.
         if not force_latest_on_upgrade:
-          current_hdp_version = hdp_select.get_role_component_current_hdp_version()
-          if current_hdp_version is not None and version != current_hdp_version:
-            version = current_hdp_version
-            Logger.info("hdp-select has not yet been called to update the symlink for this component, keep using version {0}".format(current_hdp_version))
+          current_stack_version = stack_select.get_role_component_current_stack_version()
+          if current_stack_version is not None and version != current_stack_version:
+            version = current_stack_version
+            Logger.info("hdp-select has not yet been called to update the symlink for this component, keep using version {0}".format(current_stack_version))
 
         # Only change the hadoop_conf_dir path, don't conf-select this older version
         hadoop_conf_dir = "/usr/hdp/{0}/hadoop/conf".format(version)

+ 11 - 11
ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py

@@ -61,7 +61,7 @@ TARBALL_MAP = {
 }
 
 
-def _get_single_version_from_hdp_select():
+def _get_single_version_from_stack_select():
   """
   Call "hdp-select versions" and return the version string if only one version is available.
   :return: Returns a version string if successful, and None otherwise.
@@ -70,12 +70,12 @@ def _get_single_version_from_hdp_select():
   tmpfile = tempfile.NamedTemporaryFile()
   tmp_dir = Script.get_tmp_dir()
   tmp_file = os.path.join(tmp_dir, "copy_tarball_out.txt")
-  hdp_version = None
+  stack_version = None
 
   out = None
-  get_hdp_versions_cmd = "/usr/bin/hdp-select versions > {0}".format(tmp_file)
+  get_stack_versions_cmd = "/usr/bin/hdp-select versions > {0}".format(tmp_file)
   try:
-    code, stdoutdata = shell.call(get_hdp_versions_cmd, logoutput=True)
+    code, stdoutdata = shell.call(get_stack_versions_cmd, logoutput=True)
     with open(tmp_file, 'r+') as file:
       out = file.read()
   except Exception, e:
@@ -88,17 +88,17 @@ def _get_single_version_from_hdp_select():
       Logger.logger.exception("Could not remove file {0}. Error: {1}".format(str(tmp_file), str(e)))
 
   if code != 0 or out is None or out == "":
-    Logger.error("Could not verify HDP version by calling '{0}'. Return Code: {1}, Output: {2}.".format(get_hdp_versions_cmd, str(code), str(out)))
+    Logger.error("Could not verify HDP version by calling '{0}'. Return Code: {1}, Output: {2}.".format(get_stack_versions_cmd, str(code), str(out)))
     return None
 
   matches = re.findall(r"([\d\.]+\-\d+)", out)
 
   if matches and len(matches) == 1:
-    hdp_version = matches[0]
+    stack_version = matches[0]
   elif matches and len(matches) > 1:
     Logger.error("Found multiple matches for HDP version, cannot identify the correct one from: {0}".format(", ".join(matches)))
 
-  return hdp_version
+  return stack_version
 
 def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=None, custom_dest_file=None, force_execute=False,
                  use_upgrading_version_during_uprade=True, replace_existing_files=False, host_sys_prepped=False):
@@ -152,10 +152,10 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=Non
     if current_version is None:
       # During normal operation, the first installation of services won't yet know about the version, so must rely
       # on hdp-select to get it.
-      hdp_version = _get_single_version_from_hdp_select()
-      if hdp_version:
-        Logger.info("Will use stack version {0}".format(hdp_version))
-        current_version = hdp_version
+      stack_version = _get_single_version_from_stack_select()
+      if stack_version:
+        Logger.info("Will use stack version {0}".format(stack_version))
+        current_version = stack_version
 
   if current_version is None:
     message_suffix = "during rolling %s" % str(upgrade_direction) if is_stack_upgrade else ""

+ 16 - 16
ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py

@@ -35,10 +35,10 @@ from resource_management.core import shell
 
 """
 This file provides helper methods needed for the versioning of RPMs. Specifically, it does dynamic variable
-interpretation to replace strings like {{ hdp_stack_version }}  where the value of the
+interpretation to replace strings like {{ stack_version_formatted }}  where the value of the
 variables cannot be determined ahead of time, but rather, depends on what files are found.
 
-It assumes that {{ hdp_stack_version }} is constructed as ${major.minor.patch.rev}-${build_number}
+It assumes that {{ stack_version_formatted }} is constructed as ${major.minor.patch.rev}-${build_number}
 E.g., 998.2.2.1.0-998
 Please note that "-${build_number}" is optional.
 """
@@ -54,10 +54,10 @@ def _get_tar_source_and_dest_folder(tarball_prefix):
   :return: Returns a tuple of (x, y) after verifying the properties
   """
   component_tar_source_file = default("/configurations/cluster-env/%s%s" % (tarball_prefix.lower(), TAR_SOURCE_SUFFIX), None)
-  # E.g., /usr/hdp/current/hadoop-client/tez-{{ hdp_stack_version }}.tar.gz
+  # E.g., /usr/hdp/current/hadoop-client/tez-{{ stack_version_formatted }}.tar.gz
 
   component_tar_destination_folder = default("/configurations/cluster-env/%s%s" % (tarball_prefix.lower(), TAR_DESTINATION_FOLDER_SUFFIX), None)
-  # E.g., hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/
+  # E.g., hdfs:///hdp/apps/{{ stack_version_formatted }}/mapreduce/
 
   if not component_tar_source_file or not component_tar_destination_folder:
     Logger.warning("Did not find %s tar source file and destination folder properties in cluster-env.xml" %
@@ -137,10 +137,10 @@ def _copy_files(source_and_dest_pairs, component_user, file_owner, group_owner,
   return return_value
 
 
-def copy_tarballs_to_hdfs(tarball_prefix, hdp_select_component_name, component_user, file_owner, group_owner, ignore_sysprep=False):
+def copy_tarballs_to_hdfs(tarball_prefix, stack_select_component_name, component_user, file_owner, group_owner, ignore_sysprep=False):
   """
   :param tarball_prefix: Prefix of the tarball must be one of tez, hive, mr, pig
-  :param hdp_select_component_name: Component name to get the status to determine the version
+  :param stack_select_component_name: Component name to get the status to determine the version
   :param component_user: User that will execute the Hadoop commands, usually smokeuser
   :param file_owner: Owner of the files copied to HDFS (typically hdfs user)
   :param group_owner: Group owner of the files copied to HDFS (typically hadoop group)
@@ -148,17 +148,17 @@ def copy_tarballs_to_hdfs(tarball_prefix, hdp_select_component_name, component_u
   :return: Returns 0 on success, 1 if no files were copied, and in some cases may raise an exception.
 
   In order to call this function, params.py must have all of the following,
-  hdp_stack_version, kinit_path_local, security_enabled, hdfs_user, hdfs_principal_name, hdfs_user_keytab,
+  stack_version_formatted, kinit_path_local, security_enabled, hdfs_user, hdfs_principal_name, hdfs_user_keytab,
   hadoop_bin_dir, hadoop_conf_dir, and HdfsDirectory as a partial function.
   """
   import params
 
   if not ignore_sysprep and hasattr(params, "host_sys_prepped") and params.host_sys_prepped:
-    Logger.info("Host is sys-prepped. Tarball %s will not be copied for %s." % (tarball_prefix, hdp_select_component_name))
+    Logger.info("Host is sys-prepped. Tarball %s will not be copied for %s." % (tarball_prefix, stack_select_component_name))
    return 0
 
-  if not hasattr(params, "hdp_stack_version") or params.hdp_stack_version is None:
-    Logger.warning("Could not find hdp_stack_version")
+  if not hasattr(params, "stack_version_formatted") or params.stack_version_formatted is None:
+    Logger.warning("Could not find stack_version_formatted")
    return 1
 
   component_tar_source_file, component_tar_destination_folder = _get_tar_source_and_dest_folder(tarball_prefix)
@@ -174,25 +174,25 @@ def copy_tarballs_to_hdfs(tarball_prefix, hdp_select_component_u
   tmpfile = tempfile.NamedTemporaryFile()
   out = None
   with open(tmpfile.name, 'r+') as file:
-    get_hdp_version_cmd = '/usr/bin/hdp-select status %s > %s' % (hdp_select_component_name, tmpfile.name)
-    code, stdoutdata = shell.call(get_hdp_version_cmd)
+    get_stack_version_cmd = '/usr/bin/hdp-select status %s > %s' % (stack_select_component_name, tmpfile.name)
+    code, stdoutdata = shell.call(get_stack_version_cmd)
     out = file.read()
   pass
   if code != 0 or out is None:
     Logger.warning("Could not verify HDP version by calling '%s'. Return Code: %s, Output: %s." %
-                   (get_hdp_version_cmd, str(code), str(out)))
+                   (get_stack_version_cmd, str(code), str(out)))
     return 1
 
   matches = re.findall(r"([\d\.]+\-\d+)", out)
-  hdp_version = matches[0] if matches and len(matches) > 0 else None
+  stack_version = matches[0] if matches and len(matches) > 0 else None
 
-  if not hdp_version:
+  if not stack_version:
     Logger.error("Could not parse HDP version from output of hdp-select: %s" % str(out))
     return 1
 
   file_name = os.path.basename(component_tar_source_file)
   destination_file = os.path.join(component_tar_destination_folder, file_name)
-  destination_file = destination_file.replace("{{ hdp_stack_version }}", hdp_version)
+  destination_file = destination_file.replace("{{ stack_version_formatted }}", stack_version)
 
   does_hdfs_file_exist_cmd = "fs -ls %s" % destination_file
 

+ 3 - 3
ambari-common/src/main/python/resource_management/libraries/functions/get_lzo_packages.py

@@ -22,7 +22,7 @@ Ambari Agent
 __all__ = ["get_lzo_packages"]
 
 from ambari_commons.os_check import OSCheck
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.format import format
 
 def get_lzo_packages(stack_version_unformatted):
@@ -35,9 +35,9 @@ def get_lzo_packages(stack_version_unformatted):
 
   underscored_version = stack_version_unformatted.replace('.', '_')
   dashed_version = stack_version_unformatted.replace('.', '-')
-  hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+  stack_version_formatted = format_stack_version(stack_version_unformatted)
 
-  if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+  if stack_version_formatted != "" and compare_versions(stack_version_formatted, '2.2') >= 0:
     lzo_packages += ["hadooplzo_*"]
   else:
     lzo_packages += ["hadoop-lzo"]

+ 20 - 20
ambari-common/src/main/python/resource_management/libraries/functions/get_hdp_version.py → ambari-common/src/main/python/resource_management/libraries/functions/get_stack_version.py

@@ -19,7 +19,7 @@ limitations under the License.
 Ambari Agent
 
 """
-__all__ = ["get_hdp_version"]
+__all__ = ["get_stack_version"]
 
 import os
 import re
@@ -31,27 +31,27 @@ from resource_management.core.logger import Logger
 from resource_management.core.exceptions import Fail
 from resource_management.core import shell
 
-HDP_SELECT_BINARY = "/usr/bin/hdp-select"
+STACK_SELECT_BINARY = "/usr/bin/hdp-select"
 
 @OsFamilyFuncImpl(OSConst.WINSRV_FAMILY)
-def get_hdp_version(package_name):
+def get_stack_version(package_name):
   """
-  @param package_name, name of the package, from which, function will try to get hdp version
+  @param package_name, name of the package, from which, function will try to get stack version
   """
   try:
     component_home_dir = os.environ[package_name.upper() + "_HOME"]
   except KeyError:
-    Logger.info('Skipping get_hdp_version since the component {0} is not yet available'.format(package_name))
+    Logger.info('Skipping get_stack_version since the component {0} is not yet available'.format(package_name))
     return None # lazy fail
 
-  #As a rule, component_home_dir is of the form <hdp_root_dir>\[\]<component_versioned_subdir>[\]
+  #As a rule, component_home_dir is of the form <stack_root_dir>\[\]<component_versioned_subdir>[\]
   home_dir_split = os.path.split(component_home_dir)
   iSubdir = len(home_dir_split) - 1
   while not home_dir_split[iSubdir]:
     iSubdir -= 1
 
-  #The component subdir is expected to be of the form <package_name>-<package_version>.<hdp_stack_version>
-  # with package_version = #.#.# and hdp_stack_version=#.#.#.#-<build_number>
+  #The component subdir is expected to be of the form <package_name>-<package_version>.<stack_version>
+  # with package_version = #.#.# and stack_version=#.#.#.#-<build_number>
   match = re.findall('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', home_dir_split[iSubdir])
   if not match:
     Logger.info('Failed to get extracted version for component {0}. Home dir not in expected format.'.format(package_name))
@@ -60,32 +60,32 @@ def get_hdp_version(package_name):
   return match[0]
 
 @OsFamilyFuncImpl(OsFamilyImpl.DEFAULT)
-def get_hdp_version(package_name):
+def get_stack_version(package_name):
   """
-  @param package_name, name of the package, from which, function will try to get hdp version
+  @param package_name, name of the package, from which, function will try to get stack version
   """
   
-  if not os.path.exists(HDP_SELECT_BINARY):
-    Logger.info('Skipping get_hdp_version since hdp-select is not yet available')
+  if not os.path.exists(STACK_SELECT_BINARY):
+    Logger.info('Skipping get_stack_version since ' + STACK_SELECT_BINARY + ' is not yet available')
     return None # lazy fail
   
   try:
-    command = 'ambari-python-wrap {HDP_SELECT_BINARY} status {package_name}'.format(HDP_SELECT_BINARY=HDP_SELECT_BINARY, package_name=package_name)
-    return_code, hdp_output = shell.call(command, timeout=20)
+    command = 'ambari-python-wrap {STACK_SELECT_BINARY} status {package_name}'.format(STACK_SELECT_BINARY=STACK_SELECT_BINARY, package_name=package_name)
+    return_code, stack_output = shell.call(command, timeout=20)
   except Exception, e:
     Logger.error(str(e))
-    raise Fail('Unable to execute hdp-select command to retrieve the version.')
+    raise Fail('Unable to execute ' + STACK_SELECT_BINARY + ' command to retrieve the version.')
 
   if return_code != 0:
     raise Fail(
       'Unable to determine the current version because of a non-zero return code of {0}'.format(str(return_code)))
 
-  hdp_version = re.sub(package_name + ' - ', '', hdp_output)
-  hdp_version = hdp_version.rstrip()
-  match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', hdp_version)
+  stack_version = re.sub(package_name + ' - ', '', stack_output)
+  stack_version = stack_version.rstrip()
+  match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', stack_version)
 
   if match is None:
-    Logger.info('Failed to get extracted version with hdp-select')
+    Logger.info('Failed to get extracted version with ' + STACK_SELECT_BINARY)
     return None # lazy fail
 
-  return hdp_version
+  return stack_version

+ 5 - 5
ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py → ambari-common/src/main/python/resource_management/libraries/functions/install_windows_msi.py

@@ -28,7 +28,7 @@ from resource_management.core.logger import Logger
 from resource_management.core.exceptions import Fail
 from resource_management.libraries.functions.reload_windows_env import reload_windows_env
 from resource_management.libraries.functions.windows_service_utils import check_windows_service_exists
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.version import format_stack_version, compare_versions
 import socket
 import os
 import glob
@@ -120,8 +120,8 @@ def _create_symlinks(stack_version):
   # folders
   Execute("cmd /c mklink /d %HADOOP_NODE%\\hadoop %HADOOP_HOME%")
   Execute("cmd /c mklink /d %HADOOP_NODE%\\hive %HIVE_HOME%")
-  hdp_stack_version = format_hdp_stack_version(stack_version)
-  if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+  stack_version_formatted = format_stack_version(stack_version)
+  if stack_version_formatted != "" and compare_versions(stack_version_formatted, '2.2') >= 0:
     Execute("cmd /c mklink /d %HADOOP_NODE%\\knox %KNOX_HOME%")
   # files pairs (symlink_path, path_template_to_target_file), use * to replace file version
   links_pairs = [
@@ -173,9 +173,9 @@ def install_windows_msi(url_base, save_dir, save_files, hadoop_user, hadoop_pass
       Logger.info("hdp.msi already installed")
       return
 
-    hdp_stack_version = format_hdp_stack_version(stack_version)
+    stack_version_formatted = format_stack_version(stack_version)
     hdp_22_specific_props = ''
-    if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+    if stack_version_formatted != "" and compare_versions(stack_version_formatted, '2.2') >= 0:
       hdp_22_specific_props = hdp_22.format(hdp_data_dir=hdp_data_dir)
 
     # MSIs cannot be larger than 2GB. HDPWIN 2.3 needed split in order to accommodate this limitation

+ 4 - 4
ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin.py

@@ -24,7 +24,7 @@ from datetime import datetime
 from resource_management.libraries.functions.ranger_functions import Rangeradmin
 from resource_management.core.resources import File, Execute
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions.get_hdp_version import get_hdp_version
+from resource_management.libraries.functions.get_stack_version import get_stack_version
 from resource_management.core.logger import Logger
 from resource_management.core.source import DownloadSource
 from resource_management.libraries.resources import ModifyPropertiesFile
@@ -50,8 +50,8 @@ def setup_ranger_plugin(component_select_name, service_name,
 
   File(driver_curl_target, mode=0644)
 
-  hdp_version = get_hdp_version(component_select_name)
-  file_path = format('/usr/hdp/{hdp_version}/ranger-{service_name}-plugin/install.properties')
+  stack_version = get_stack_version(component_select_name)
+  file_path = format('/usr/hdp/{stack_version}/ranger-{service_name}-plugin/install.properties')
   
   if not os.path.isfile(file_path):
     raise Fail(format('Ranger {service_name} plugin install.properties file does not exist at {file_path}'))
@@ -79,7 +79,7 @@ def setup_ranger_plugin(component_select_name, service_name,
   else:
     cmd = (format('disable-{service_name}-plugin.sh'),)
     
-  cmd_env = {'JAVA_HOME': java_home, 'PWD': format('/usr/hdp/{hdp_version}/ranger-{service_name}-plugin'), 'PATH': format('/usr/hdp/{hdp_version}/ranger-{service_name}-plugin')}
+  cmd_env = {'JAVA_HOME': java_home, 'PWD': format('/usr/hdp/{stack_version}/ranger-{service_name}-plugin'), 'PATH': format('/usr/hdp/{stack_version}/ranger-{service_name}-plugin')}
   
   Execute(cmd, 
         environment=cmd_env, 

+ 14 - 14
ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py

@@ -26,7 +26,7 @@ from resource_management.libraries.functions.ranger_functions import Rangeradmin
 from resource_management.core.resources import File, Directory, Execute
 from resource_management.libraries.resources.xml_config import XmlConfig
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions.get_hdp_version import get_hdp_version
+from resource_management.libraries.functions.get_stack_version import get_stack_version
 from resource_management.core.logger import Logger
 from resource_management.core.source import DownloadSource, InlineTemplate
 from resource_management.libraries.functions.ranger_functions_v2 import RangeradminV2
@@ -44,7 +44,7 @@ def setup_ranger_plugin(component_select_name, service_name,
                         plugin_policymgr_ssl_properties, plugin_policymgr_ssl_attributes,
                         component_list, audit_db_is_enabled, credential_file, 
                         xa_audit_db_password, ssl_truststore_password,
-                        ssl_keystore_password, api_version=None, hdp_version_override = None, skip_if_rangeradmin_down = True):
+                        ssl_keystore_password, api_version=None, stack_version_override = None, skip_if_rangeradmin_down = True):
 
   if audit_db_is_enabled:
     File(component_downloaded_custom_connector,
@@ -59,9 +59,9 @@ def setup_ranger_plugin(component_select_name, service_name,
 
     File(component_driver_curl_target, mode=0644)
 
-  hdp_version = get_hdp_version(component_select_name)
-  if hdp_version_override is not None:
-    hdp_version = hdp_version_override
+  stack_version = get_stack_version(component_select_name)
+  if stack_version_override is not None:
+    stack_version = stack_version_override
 
   component_conf_dir = conf_dict
   
@@ -135,9 +135,9 @@ def setup_ranger_plugin(component_select_name, service_name,
         mode=0744) 
 
     #This should be done by rpm
-    #setup_ranger_plugin_jar_symblink(hdp_version, service_name, component_list)
+    #setup_ranger_plugin_jar_symblink(stack_version, service_name, component_list)
 
-    setup_ranger_plugin_keystore(service_name, audit_db_is_enabled, hdp_version, credential_file,
+    setup_ranger_plugin_keystore(service_name, audit_db_is_enabled, stack_version, credential_file,
               xa_audit_db_password, ssl_truststore_password, ssl_keystore_password,
               component_user, component_group, java_home)
 
@@ -147,22 +147,22 @@ def setup_ranger_plugin(component_select_name, service_name,
     )    
 
 
-def setup_ranger_plugin_jar_symblink(hdp_version, service_name, component_list):
+def setup_ranger_plugin_jar_symblink(stack_version, service_name, component_list):
 
-  jar_files = os.listdir(format('/usr/hdp/{hdp_version}/ranger-{service_name}-plugin/lib'))
+  jar_files = os.listdir(format('/usr/hdp/{stack_version}/ranger-{service_name}-plugin/lib'))
 
   for jar_file in jar_files:
     for component in component_list:
-      Execute(('ln','-sf',format('/usr/hdp/{hdp_version}/ranger-{service_name}-plugin/lib/{jar_file}'),format('/usr/hdp/current/{component}/lib/{jar_file}')),
+      Execute(('ln','-sf',format('/usr/hdp/{stack_version}/ranger-{service_name}-plugin/lib/{jar_file}'),format('/usr/hdp/current/{component}/lib/{jar_file}')),
       not_if=format('ls /usr/hdp/current/{component}/lib/{jar_file}'),
-      only_if=format('ls /usr/hdp/{hdp_version}/ranger-{service_name}-plugin/lib/{jar_file}'),
+      only_if=format('ls /usr/hdp/{stack_version}/ranger-{service_name}-plugin/lib/{jar_file}'),
      sudo=True)
 
-def setup_ranger_plugin_keystore(service_name, audit_db_is_enabled, hdp_version, credential_file, xa_audit_db_password,
+def setup_ranger_plugin_keystore(service_name, audit_db_is_enabled, stack_version, credential_file, xa_audit_db_password,
                                 ssl_truststore_password, ssl_keystore_password, component_user, component_group, java_home):
 
-  cred_lib_path = format('/usr/hdp/{hdp_version}/ranger-{service_name}-plugin/install/lib/*')
-  cred_setup_prefix = (format('/usr/hdp/{hdp_version}/ranger-{service_name}-plugin/ranger_credential_helper.py'), '-l', cred_lib_path)
+  cred_lib_path = format('/usr/hdp/{stack_version}/ranger-{service_name}-plugin/install/lib/*')
+  cred_setup_prefix = (format('/usr/hdp/{stack_version}/ranger-{service_name}-plugin/ranger_credential_helper.py'), '-l', cred_lib_path)
 
   if audit_db_is_enabled:
     cred_setup = cred_setup_prefix + ('-f', credential_file, '-k', 'auditDBCred', '-v', PasswordString(xa_audit_db_password), '-c', '1')

+ 28 - 28
ambari-common/src/main/python/resource_management/libraries/functions/hdp_select.py → ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py

@@ -25,18 +25,18 @@ from resource_management.core.logger import Logger
 from resource_management.core.exceptions import Fail
 from resource_management.core.exceptions import Fail
 from resource_management.core.resources.system import Execute
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.default import default
-from resource_management.libraries.functions.get_hdp_version import get_hdp_version
+from resource_management.libraries.functions.get_stack_version import get_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.script.script import Script
 from resource_management.core.shell import call
 from resource_management.core.shell import call
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.version_select_util import get_versions_from_stack_root
 from resource_management.libraries.functions.version_select_util import get_versions_from_stack_root
 
 
-HDP_SELECT = '/usr/bin/hdp-select'
-HDP_SELECT_PREFIX = ('ambari-python-wrap', HDP_SELECT)
+STACK_SELECT = '/usr/bin/hdp-select'
+STACK_SELECT_PREFIX = ('ambari-python-wrap', STACK_SELECT)
 
 
 # hdp-select set oozie-server 2.2.0.0-1234
 # hdp-select set oozie-server 2.2.0.0-1234
-TEMPLATE = HDP_SELECT_PREFIX + ('set',)
+TEMPLATE = STACK_SELECT_PREFIX + ('set',)
 
 
 # a mapping of Ambari server role to hdp-select component name for all
 # a mapping of Ambari server role to hdp-select component name for all
 # non-clients
 # non-clients
@@ -154,33 +154,33 @@ def select(component, version):
       Logger.info("After {0}, reloaded module {1}".format(command, moduleName))
       Logger.info("After {0}, reloaded module {1}".format(command, moduleName))
 
 
 
 
-def get_role_component_current_hdp_version():
+def get_role_component_current_stack_version():
   """
   """
   Gets the current HDP version of the component that this role command is for.
   Gets the current HDP version of the component that this role command is for.
   :return:  the current HDP version of the specified component or None
   :return:  the current HDP version of the specified component or None
   """
   """
-  hdp_select_component = None
+  stack_select_component = None
   role = default("/role", "")
   role = default("/role", "")
   role_command =  default("/roleCommand", "")
   role_command =  default("/roleCommand", "")
 
 
   if role in SERVER_ROLE_DIRECTORY_MAP:
   if role in SERVER_ROLE_DIRECTORY_MAP:
-    hdp_select_component = SERVER_ROLE_DIRECTORY_MAP[role]
+    stack_select_component = SERVER_ROLE_DIRECTORY_MAP[role]
   elif role_command == "SERVICE_CHECK" and role in SERVICE_CHECK_DIRECTORY_MAP:
   elif role_command == "SERVICE_CHECK" and role in SERVICE_CHECK_DIRECTORY_MAP:
-    hdp_select_component = SERVICE_CHECK_DIRECTORY_MAP[role]
+    stack_select_component = SERVICE_CHECK_DIRECTORY_MAP[role]
 
 
-  if hdp_select_component is None:
+  if stack_select_component is None:
     return None
     return None
 
 
-  current_hdp_version = get_hdp_version(hdp_select_component)
+  current_stack_version = get_stack_version(stack_select_component)
 
 
-  if current_hdp_version is None:
+  if current_stack_version is None:
     Logger.warning("Unable to determine hdp-select version for {0}".format(
     Logger.warning("Unable to determine hdp-select version for {0}".format(
-      hdp_select_component))
+      stack_select_component))
   else:
   else:
     Logger.info("{0} is currently at version {1}".format(
     Logger.info("{0} is currently at version {1}".format(
-      hdp_select_component, current_hdp_version))
+      stack_select_component, current_stack_version))
 
 
-  return current_hdp_version
+  return current_stack_version
 
 
 
 
 def get_hadoop_dir(target, force_latest_on_upgrade=False):
 def get_hadoop_dir(target, force_latest_on_upgrade=False):
@@ -201,7 +201,7 @@ def get_hadoop_dir(target, force_latest_on_upgrade=False):

   hadoop_dir = HADOOP_DIR_DEFAULTS[target]

-  if Script.is_hdp_stack_greater_or_equal("2.2"):
+  if Script.is_stack_greater_or_equal("2.2"):
     # home uses a different template
     if target == "home":
       hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format("current", "hadoop-client")
@@ -218,9 +218,9 @@ def get_hadoop_dir(target, force_latest_on_upgrade=False):

         # determine if hdp-select has been run and if not, then use the current
         # hdp version until this component is upgraded
-        current_hdp_version = get_role_component_current_hdp_version()
-        if current_hdp_version is not None and stack_version != current_hdp_version:
-          stack_version = current_hdp_version
+        current_stack_version = get_role_component_current_stack_version()
+        if current_stack_version is not None and stack_version != current_stack_version:
+          stack_version = current_stack_version

         if target == "home":
           # home uses a different template
@@ -243,8 +243,8 @@ def get_hadoop_dir_for_stack_version(target, stack_version):

   hadoop_dir = HADOOP_DIR_DEFAULTS[target]

-  formatted_stack_version = format_hdp_stack_version(stack_version)
-  if Script.is_hdp_stack_greater_or_equal_to(formatted_stack_version, "2.2"):
+  formatted_stack_version = format_stack_version(stack_version)
+  if Script.is_stack_greater_or_equal_to(formatted_stack_version, "2.2"):
     # home uses a different template
     if target == "home":
       hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_version, "hadoop")
@@ -271,7 +271,7 @@ def _get_upgrade_stack():
   return None


-def get_hdp_versions(stack_root):
+def get_stack_versions(stack_root):
   """
   Gets list of stack versions installed on the host.
   By default a call to hdp-select versions is made to get the list of installed stack versions.
@@ -279,7 +279,7 @@ def get_hdp_versions(stack_root):
   :param stack_root: Stack install root
   :return: Returns list of installed stack versions.
   """
-  code, out = call(HDP_SELECT_PREFIX + ('versions',))
+  code, out = call(STACK_SELECT_PREFIX + ('versions',))
   versions = []
   if 0 == code:
     for line in out.splitlines():
@@ -288,7 +288,7 @@
     versions = get_versions_from_stack_root(stack_root)
   return versions

-def get_hdp_version_before_install(component_name):
+def get_stack_version_before_install(component_name):
   """
   Works in a similar way to 'hdp-select status component',
   but also works for not yet installed packages.
@@ -297,11 +297,11 @@ def get_hdp_version_before_install(component_name):
   """
   component_dir = HADOOP_HOME_DIR_TEMPLATE.format("current", component_name)
   if os.path.islink(component_dir):
-    hdp_version = os.path.basename(os.path.dirname(os.readlink(component_dir)))
-    match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', hdp_version)
+    stack_version = os.path.basename(os.path.dirname(os.readlink(component_dir)))
+    match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', stack_version)
     if match is None:
-      Logger.info('Failed to get extracted version with hdp-select in method get_hdp_version_before_install')
+      Logger.info('Failed to get extracted version with hdp-select in method get_stack_version_before_install')
       return None # lazy fail
-    return hdp_version
+    return stack_version
   else:
     return None

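For orientation, a minimal sketch of how the renamed constants compose into a full hdp-select invocation; the component and version literals come from the comment in the hunk above and are purely illustrative:

# sketch only, not part of the patch: composing the renamed constants
# into the command tuple that call()/Execute() would receive
STACK_SELECT = '/usr/bin/hdp-select'
STACK_SELECT_PREFIX = ('ambari-python-wrap', STACK_SELECT)
TEMPLATE = STACK_SELECT_PREFIX + ('set',)

command = TEMPLATE + ('oozie-server', '2.2.0.0-1234')
# command == ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', '2.2.0.0-1234')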
+ 4 - 4
ambari-common/src/main/python/resource_management/libraries/functions/version.py

@@ -34,7 +34,7 @@ def _normalize(v, desired_segments=0):
   return [int(x) for x in v_list]


-def format_hdp_stack_version(input):
+def format_stack_version(input):
   """
   :param input: Input string, e.g. "2.2" or "GlusterFS", or "2.0.6.GlusterFS", or "2.2.0.1-885"
   :return: Returns a well-formatted HDP stack version of the form #.#.#.# as a string.
@@ -67,11 +67,11 @@ def compare_versions(version1, version2, format=False):
   Stack Version 2.0.6.0 vs 2.2.0.0
   :param version1: First parameter for version
   :param version2: Second parameter for version
-  :param format: optionally format the versions via format_hdp_stack_version before comparing them
+  :param format: optionally format the versions via format_stack_version before comparing them
   :return: Returns -1 if version1 is before version2, 0 if they are equal, and 1 if version1 is after version2
   """
-  v1 = version1 if not format else format_hdp_stack_version(version1)
-  v2 = version2 if not format else format_hdp_stack_version(version2)
+  v1 = version1 if not format else format_stack_version(version1)
+  v2 = version2 if not format else format_stack_version(version2)

   max_segments = max(len(v1.split(".")), len(v2.split(".")))
   return cmp(_normalize(v1, desired_segments=max_segments), _normalize(v2, desired_segments=max_segments))

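A quick sketch of how the renamed helpers are expected to behave, inferred from the docstrings above (the normalized value is an assumption drawn from the #.#.#.# contract, not output captured from this patch):

from resource_management.libraries.functions.version import format_stack_version, compare_versions

formatted = format_stack_version("2.2")           # assumed to normalize to "2.2.0.0"
compare_versions("2.0.6.0", "2.2.0.0")            # -1: version1 is before version2
compare_versions("2.2", "2.2.0.0", format=True)   # 0 once both sides are normalized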
+ 5 - 5
ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py

@@ -47,23 +47,23 @@ def get_component_version(stack_name, component_name):
   if stack_name == "HDP":
     tmpfile = tempfile.NamedTemporaryFile()

-    get_hdp_comp_version_cmd = ""
+    get_stack_comp_version_cmd = ""
     try:
       # This is necessary because Ubuntu returns "stdin: is not a tty", see AMBARI-8088
       with open(tmpfile.name, 'r') as file:
-        get_hdp_comp_version_cmd = '/usr/bin/hdp-select status %s > %s' % (component_name, tmpfile.name)
-        code, stdoutdata = shell.call(get_hdp_comp_version_cmd)
+        get_stack_comp_version_cmd = '/usr/bin/hdp-select status %s > %s' % (component_name, tmpfile.name)
+        code, stdoutdata = shell.call(get_stack_comp_version_cmd)
         out = file.read()

       if code != 0 or out is None:
         raise Exception("Code is nonzero or output is empty")

-      Logger.debug("Command: %s\nOutput: %s" % (get_hdp_comp_version_cmd, str(out)))
+      Logger.debug("Command: %s\nOutput: %s" % (get_stack_comp_version_cmd, str(out)))
       matches = re.findall(r"([\d\.]+\-\d+)", out)
       version = matches[0] if matches and len(matches) > 0 else None
     except Exception, e:
       Logger.error("Could not determine HDP version for component %s by calling '%s'. Return Code: %s, Output: %s." %
-                   (component_name, get_hdp_comp_version_cmd, str(code), str(out)))
+                   (component_name, get_stack_comp_version_cmd, str(code), str(out)))
   elif stack_name == "HDPWIN":
     pass
   elif stack_name == "GlusterFS":

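The version-extraction regex above can be exercised on its own; the sample hdp-select output below is an assumed shape, used only for illustration:

import re

out = "hadoop-client - 2.3.4.0-3485"   # assumed shape of '/usr/bin/hdp-select status hadoop-client'
matches = re.findall(r"([\d\.]+\-\d+)", out)
version = matches[0] if matches else None
print(version)                          # 2.3.4.0-3485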
+ 25 - 25
ambari-common/src/main/python/resource_management/libraries/script/script.py

@@ -42,7 +42,7 @@ from resource_management.core.exceptions import Fail, ClientComponentHasNoStatus
 from resource_management.core.resources.packaging import Package
 from resource_management.libraries.functions.version_select_util import get_component_version
 from resource_management.libraries.functions.version import compare_versions
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions import packages_analyzer
 from resource_management.libraries.script.config_dictionary import ConfigDictionary, UnknownConfiguration
@@ -52,7 +52,7 @@ from contextlib import closing
 import ambari_simplejson as json # simplejson is much faster compared to the Python 2.6 json module and has the same functions set.

 if OSCheck.is_windows_family():
-  from resource_management.libraries.functions.install_hdp_msi import install_windows_msi
+  from resource_management.libraries.functions.install_windows_msi import install_windows_msi
   from resource_management.libraries.functions.reload_windows_env import reload_windows_env
   from resource_management.libraries.functions.zip_archive import archive_dir
   from resource_management.libraries.resources import Msi
@@ -177,8 +177,8 @@ class Script(object):
     """
     from resource_management.libraries.functions.default import default
     stack_version_unformatted = str(default("/hostLevelParams/stack_version", ""))
-    hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
-    if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+    stack_version_formatted = format_stack_version(stack_version_unformatted)
+    if stack_version_formatted != "" and compare_versions(stack_version_formatted, '2.2') >= 0:
       if command_name.lower() == "status":
         request_version = default("/commandParams/request_version", None)
         if request_version is not None:
@@ -259,13 +259,13 @@ class Script(object):

     before the call. However takes a bit of time, so better to avoid.

-    :return: hdp version including the build number. e.g.: 2.3.4.0-1234.
+    :return: stack version including the build number. e.g.: 2.3.4.0-1234.
     """
     # preferred way is to get the actual selected version of current component
     component_name = self.get_component_name()
     if not Script.stack_version_from_distro_select and component_name:
-      from resource_management.libraries.functions import hdp_select
-      Script.stack_version_from_distro_select = hdp_select.get_hdp_version_before_install(component_name)
+      from resource_management.libraries.functions import stack_select
+      Script.stack_version_from_distro_select = stack_select.get_stack_version_before_install(component_name)

     # if hdp-select has not yet been done (situations like first install), we can use hdp-select version itself.
     if not Script.stack_version_from_distro_select:
@@ -329,7 +329,7 @@ class Script(object):
     return default("/hostLevelParams/stack_name", None)

   @staticmethod
-  def get_hdp_stack_version():
+  def get_stack_version():
     """
     Gets the normalized version of the HDP stack in the form #.#.#.# if it is
     present on the configurations sent.
@@ -348,7 +348,7 @@ class Script(object):
     if stack_version_unformatted is None or stack_version_unformatted == '':
       return None

-    return format_hdp_stack_version(stack_version_unformatted)
+    return format_stack_version(stack_version_unformatted)


   @staticmethod
@@ -360,57 +360,57 @@ class Script(object):


   @staticmethod
-  def is_hdp_stack_greater(formatted_hdp_stack_version, compare_to_version):
+  def is_stack_greater(stack_version_formatted, compare_to_version):
     """
-    Gets whether the provided formatted_hdp_stack_version (normalized)
+    Gets whether the provided stack_version_formatted (normalized)
     is greater than the specified stack version
-    :param formatted_hdp_stack_version: the version of stack to compare
+    :param stack_version_formatted: the version of stack to compare
     :param compare_to_version: the version of stack to compare to
     :return: True if the command's stack is greater than the specified version
     """
-    if formatted_hdp_stack_version is None or formatted_hdp_stack_version == "":
+    if stack_version_formatted is None or stack_version_formatted == "":
       return False

-    return compare_versions(formatted_hdp_stack_version, compare_to_version) > 0
+    return compare_versions(stack_version_formatted, compare_to_version) > 0

   @staticmethod
-  def is_hdp_stack_greater_or_equal(compare_to_version):
+  def is_stack_greater_or_equal(compare_to_version):
     """
     Gets whether the hostLevelParams/stack_version, after being normalized,
     is greater than or equal to the specified stack version
     :param compare_to_version: the version to compare to
     :return: True if the command's stack is greater than or equal to the specified version
     """
-    return Script.is_hdp_stack_greater_or_equal_to(Script.get_hdp_stack_version(), compare_to_version)
+    return Script.is_stack_greater_or_equal_to(Script.get_stack_version(), compare_to_version)

   @staticmethod
-  def is_hdp_stack_greater_or_equal_to(formatted_hdp_stack_version, compare_to_version):
+  def is_stack_greater_or_equal_to(stack_version_formatted, compare_to_version):
     """
-    Gets whether the provided formatted_hdp_stack_version (normalized)
+    Gets whether the provided stack_version_formatted (normalized)
     is greater than or equal to the specified stack version
-    :param formatted_hdp_stack_version: the version of stack to compare
+    :param stack_version_formatted: the version of stack to compare
     :param compare_to_version: the version of stack to compare to
     :return: True if the command's stack is greater than or equal to the specified version
     """
-    if formatted_hdp_stack_version is None or formatted_hdp_stack_version == "":
+    if stack_version_formatted is None or stack_version_formatted == "":
      return False

-    return compare_versions(formatted_hdp_stack_version, compare_to_version) >= 0
+    return compare_versions(stack_version_formatted, compare_to_version) >= 0

   @staticmethod
-  def is_hdp_stack_less_than(compare_to_version):
+  def is_stack_less_than(compare_to_version):
     """
     Gets whether the hostLevelParams/stack_version, after being normalized,
     is less than the specified stack version
     :param compare_to_version: the version to compare to
     :return: True if the command's stack is less than the specified version
     """
-    hdp_stack_version = Script.get_hdp_stack_version()
+    stack_version_formatted = Script.get_stack_version()

-    if hdp_stack_version is None:
+    if stack_version_formatted is None:
       return False

-    return compare_versions(hdp_stack_version, compare_to_version) < 0
+    return compare_versions(stack_version_formatted, compare_to_version) < 0

   def install(self, env):
     """

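The renamed comparison helpers are used as version guards throughout the service scripts below; a minimal sketch of the pattern (the version literals mirror the guards in this patch, the function name is hypothetical):

from resource_management.libraries.script.script import Script

def pre_upgrade_restart_sketch():
  # bail out unless the stack is at least 2.2, as the service scripts do
  if Script.is_stack_less_than("2.2"):
    return
  # enable 2.3+-only behavior, as in the ACCUMULO params.py below
  has_secure_user_auth = Script.is_stack_greater_or_equal("2.3")
  return has_secure_user_auth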
+ 3 - 3
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py

@@ -21,7 +21,7 @@ limitations under the License.
 from resource_management.core.logger import Logger
 from resource_management.core.exceptions import ClientComponentHasNoStatus
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.script.script import Script

 from accumulo_configuration import setup_conf_dir
@@ -54,12 +54,12 @@ class AccumuloClient(Script):

     # this function should not execute if the version can't be determined or
     # is not at least HDP 2.2.0.0
-    if Script.is_hdp_stack_less_than("2.2"):
+    if Script.is_stack_less_than("2.2"):
       return

     Logger.info("Executing Accumulo Client Upgrade pre-restart")
     conf_select.select(params.stack_name, "accumulo", params.version)
-    hdp_select.select("accumulo-client", params.version)
+    stack_select.select("accumulo-client", params.version)

 if __name__ == "__main__":
   AccumuloClient().execute()

+ 11 - 11
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py

@@ -22,7 +22,7 @@ from resource_management.core.logger import Logger
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.security_commons import build_expectations
 from resource_management.libraries.functions.security_commons import cached_kinit_executor
 from resource_management.libraries.functions.security_commons import get_params_from_filesystem
@@ -37,7 +37,7 @@ class AccumuloScript(Script):

   # a mapping between the component name used by these scripts and the name
   # which is used by hdp-select
-  COMPONENT_TO_HDP_SELECT_MAPPING = {
+  COMPONENT_TO_STACK_SELECT_MAPPING = {
     "gc" : "accumulo-gc",
     "master" : "accumulo-master",
     "monitor" : "accumulo-monitor",
@@ -55,11 +55,11 @@ class AccumuloScript(Script):
     :return:  the name of the component on the HDP stack which is used by
               hdp-select
     """
-    if self.component not in self.COMPONENT_TO_HDP_SELECT_MAPPING:
+    if self.component not in self.COMPONENT_TO_STACK_SELECT_MAPPING:
       return None

-    hdp_component = self.COMPONENT_TO_HDP_SELECT_MAPPING[self.component]
-    return {"HDP": hdp_component}
+    stack_component = self.COMPONENT_TO_STACK_SELECT_MAPPING[self.component]
+    return {"HDP": stack_component}


   def install(self, env):
@@ -102,21 +102,21 @@ class AccumuloScript(Script):

     # this function should not execute if the version can't be determined or
     # is not at least HDP 2.2.0.0
-    if Script.is_hdp_stack_less_than("2.2"):
+    if Script.is_stack_less_than("2.2"):
       return

-    if self.component not in self.COMPONENT_TO_HDP_SELECT_MAPPING:
+    if self.component not in self.COMPONENT_TO_STACK_SELECT_MAPPING:
       Logger.info("Unable to execute an upgrade for unknown component {0}".format(self.component))
       raise Fail("Unable to execute an upgrade for unknown component {0}".format(self.component))

-    hdp_component = self.COMPONENT_TO_HDP_SELECT_MAPPING[self.component]
+    stack_component = self.COMPONENT_TO_STACK_SELECT_MAPPING[self.component]

-    Logger.info("Executing Accumulo Upgrade pre-restart for {0}".format(hdp_component))
+    Logger.info("Executing Accumulo Upgrade pre-restart for {0}".format(stack_component))
     conf_select.select(params.stack_name, "accumulo", params.version)
-    hdp_select.select(hdp_component, params.version)
+    stack_select.select(stack_component, params.version)

     # some accumulo components depend on the client, so update that too
-    hdp_select.select("accumulo-client", params.version)
+    stack_select.select("accumulo-client", params.version)


   def security_status(self, env):

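The mapping above drives both get_component_name and the pre-restart selection; a standalone sketch of the lookup, pulled out of the class for illustration (the "master" literal is hypothetical):

COMPONENT_TO_STACK_SELECT_MAPPING = {
  "gc" : "accumulo-gc",
  "master" : "accumulo-master",
  "monitor" : "accumulo-monitor",
}

component = "master"   # hypothetical role for this command
stack_component = COMPONENT_TO_STACK_SELECT_MAPPING.get(component)
print({"HDP": stack_component} if stack_component else None)   # {'HDP': 'accumulo-master'}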
+ 6 - 6
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py

@@ -18,10 +18,10 @@ limitations under the License.

 """
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.get_bare_principal import get_bare_principal
 from resource_management.libraries.script.script import Script
@@ -39,10 +39,10 @@ security_enabled = status_params.security_enabled
 stack_name = default("/hostLevelParams/stack_name", None)
 version = default("/commandParams/version", None)
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)

 has_secure_user_auth = False
-if Script.is_hdp_stack_greater_or_equal("2.3"):
+if Script.is_stack_greater_or_equal("2.3"):
   has_secure_user_auth = True

 # configuration directories
@@ -50,8 +50,8 @@ conf_dir = status_params.conf_dir
 server_conf_dir = status_params.server_conf_dir

 # service locations
-hadoop_prefix = hdp_select.get_hadoop_dir("home")
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
+hadoop_prefix = stack_select.get_hadoop_dir("home")
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
 zookeeper_home = "/usr/hdp/current/zookeeper-client"

 # the configuration directory for HDFS/YARN/MapR is the hadoop config

+ 3 - 3
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py

@@ -21,7 +21,7 @@ limitations under the License.
 import sys
 from resource_management import *
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select

 from metadata import metadata

@@ -37,9 +37,9 @@ class AtlasClient(Script):
   #   import params
   #   env.set_params(params)
   #
-  #   if params.version and compare_versions(format_hdp_stack_version(params.version), '2.3.0.0') >= 0:
+  #   if params.version and compare_versions(format_stack_version(params.version), '2.3.0.0') >= 0:
   #     conf_select.select(params.stack_name, "atlas", params.version)
-  #     hdp_select.select("atlas-client", params.version)
+  #     stack_select.select("atlas-client", params.version)

   def install(self, env):
     self.install_packages(env)

+ 4 - 4
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py

@@ -18,10 +18,10 @@ limitations under the License.
 """
 from metadata import metadata
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management import Execute, check_process_status, Script
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.security_commons import build_expectations, \
   get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_PROPERTIES
@@ -43,9 +43,9 @@ class MetadataServer(Script):
     import params
     env.set_params(params)

-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.3.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.3.0.0') >= 0:
       # conf_select.select(params.stack_name, "atlas", params.version)
-      hdp_select.select("atlas-server", params.version)
+      stack_select.select("atlas-server", params.version)

   def start(self, env, upgrade_type=None):
     import params

+ 2 - 2
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py

@@ -19,7 +19,7 @@ limitations under the License.
 """
 import os
 import sys
-from resource_management import format_hdp_stack_version, Script
+from resource_management import format_stack_version, Script
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.default import default

@@ -46,7 +46,7 @@ version = default("/commandParams/version", None)

 # hdp version
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)

 metadata_home = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ else '/usr/hdp/current/atlas-server'
 metadata_bin = format("{metadata_home}/bin")

+ 3 - 3
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py

@@ -19,7 +19,7 @@ limitations under the License.

 from resource_management import *
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from falcon import falcon
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -49,12 +49,12 @@ class FalconClientLinux(FalconClient):

     # this function should not execute if the version can't be determined or
     # is not at least HDP 2.2.0.0
-    if not params.version or compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') < 0:
+    if not params.version or compare_versions(format_stack_version(params.version), '2.2.0.0') < 0:
       return

     Logger.info("Executing Falcon Client Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "falcon", params.version)
-    hdp_select.select("falcon-client", params.version)
+    stack_select.select("falcon-client", params.version)

   def security_status(self, env):
     import status_params

+ 3 - 3
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py

@@ -22,7 +22,7 @@ import falcon_server_upgrade
 from resource_management.core.logger import Logger
 from resource_management.libraries.script import Script
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.functions.security_commons import build_expectations
 from resource_management.libraries.functions.security_commons import cached_kinit_executor
@@ -77,12 +77,12 @@ class FalconServerLinux(FalconServer):

     # this function should not execute if the version can't be determined or
     # is not at least HDP 2.2.0.0
-    if Script.is_hdp_stack_less_than("2.2"):
+    if Script.is_stack_less_than("2.2"):
       return

     Logger.info("Executing Falcon Server Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "falcon", params.version)
-    hdp_select.select("falcon-server", params.version)
+    stack_select.select("falcon-server", params.version)
     falcon_server_upgrade.pre_start_restore()

   def security_status(self, env):

+ 6 - 6
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py

@@ -19,9 +19,9 @@ limitations under the License.
 import status_params

 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
@@ -35,14 +35,14 @@ stack_name = default("/hostLevelParams/stack_name", None)
 version = default("/commandParams/version", None)

 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
 etc_prefix_dir = "/etc/falcon"

 # hadoop params
-hadoop_home_dir = hdp_select.get_hadoop_dir("home")
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
+hadoop_home_dir = stack_select.get_hadoop_dir("home")
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")

-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):

   # if this is a server action, then use the server binaries; smoke tests
   # use the client binaries

+ 1 - 1
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py

@@ -46,7 +46,7 @@ else:
   hadoop_conf_dir = conf_select.get_hadoop_conf_dir()

   falcon_conf_dir = "/etc/falcon/conf"
-  if Script.is_hdp_stack_greater_or_equal("2.2"):
+  if Script.is_stack_greater_or_equal("2.2"):
     falcon_conf_dir = format("/usr/hdp/current/{component_directory}/conf")

   # Security related/required params

+ 3 - 3
ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py

@@ -24,7 +24,7 @@ from flume import get_desired_state

 from resource_management import *
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.flume_agent_helper import find_expected_agent_names
 from resource_management.libraries.functions.flume_agent_helper import get_flume_status

@@ -89,12 +89,12 @@ class FlumeHandlerLinux(FlumeHandler):

     # this function should not execute if the version can't be determined or
     # is not at least HDP 2.2.0.0
-    if not params.version or Script.is_hdp_stack_less_than("2.2"):
+    if not params.version or Script.is_stack_less_than("2.2"):
       return

     Logger.info("Executing Flume Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "flume", params.version)
-    hdp_select.select("flume-server", params.version)
+    stack_select.select("flume-server", params.version)

     # only restore on upgrade, not downgrade
     if params.upgrade_direction == Direction.UPGRADE:

+ 3 - 3
ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py

@@ -19,7 +19,7 @@ limitations under the License.
 from ambari_commons import OSCheck
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.script.script import Script

@@ -42,7 +42,7 @@ proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
 security_enabled = False

 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)

 # hadoop default parameters
 flume_bin = '/usr/bin/flume-ng'
@@ -50,7 +50,7 @@ flume_hive_home = '/usr/lib/hive'
 flume_hcat_home = '/usr/lib/hive-hcatalog'

 # hadoop parameters for 2.2+
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
   flume_bin = '/usr/hdp/current/flume-server/bin/flume-ng'
   flume_hive_home = '/usr/hdp/current/hive-metastore'
   flume_hcat_home = '/usr/hdp/current/hive-webhcat'

+ 1 - 1
ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_linux.py

@@ -29,7 +29,7 @@ upgrade_direction = default("/commandParams/upgrade_direction", Direction.UPGRAD
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])

 flume_conf_dir = '/etc/flume/conf'
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
   flume_conf_dir = '/usr/hdp/current/flume-server/conf'

 flume_user = 'flume'

+ 1 - 1
ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py

@@ -21,7 +21,7 @@ from resource_management.core.resources.system import Execute
 from resource_management.core.logger import Logger
 from resource_management.libraries.functions.check_process_status import check_process_status
 try:
-    from resource_management.libraries.functions import hdp_select as hadoop_select
+    from resource_management.libraries.functions import stack_select as hadoop_select
 except ImportError:
     from resource_management.libraries.functions import phd_select as hadoop_select


+ 5 - 5
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py

@@ -21,7 +21,7 @@ limitations under the License.
 import sys
 from resource_management import *
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from hbase import hbase
 from ambari_commons import OSCheck, OSConst
 from ambari_commons.os_family_impl import OsFamilyImpl
@@ -57,13 +57,13 @@ class HbaseClientDefault(HbaseClient):
     import params
     env.set_params(params)

-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hbase", params.version)
-      hdp_select.select("hbase-client", params.version)
+      stack_select.select("hbase-client", params.version)

       # phoenix may not always be deployed
       try:
-        hdp_select.select("phoenix-client", params.version)
+        stack_select.select("phoenix-client", params.version)
       except Exception as e:
         print "Ignoring error due to missing phoenix-client"
         print str(e)
@@ -73,7 +73,7 @@ class HbaseClientDefault(HbaseClient):
       # of the final "CLIENTS" group and we need to ensure that hadoop-client
       # is also set
       conf_select.select(params.stack_name, "hadoop", params.version)
-      hdp_select.select("hadoop-client", params.version)
+      stack_select.select("hadoop-client", params.version)


 if __name__ == "__main__":

+ 5 - 5
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py

@@ -28,9 +28,9 @@ from ambari_commons.str_utils import cbool, cint

 from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import is_empty
@@ -51,10 +51,10 @@ component_directory = status_params.component_directory
 etc_prefix_dir = "/etc/hbase"

 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)

 # hadoop default parameters
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
 region_mover = "/usr/lib/hbase/bin/region_mover.rb"
@@ -63,7 +63,7 @@ hbase_cmd = "/usr/lib/hbase/bin/hbase"
 hbase_max_direct_memory_size = None

 # hadoop parameters for 2.2+
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
   daemon_script = format('/usr/hdp/current/hbase-client/bin/hbase-daemon.sh')
   region_mover = format('/usr/hdp/current/hbase-client/bin/region_mover.rb')
   region_drainer = format('/usr/hdp/current/hbase-client/bin/draining_servers.rb')

+ 3 - 3
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py

@@ -18,7 +18,7 @@ limitations under the License.
 """

 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.script import Script
 from phoenix_service import phoenix_service
 from hbase import hbase
@@ -59,10 +59,10 @@ class PhoenixQueryServer(Script):
     import params
     env.set_params(params)

-    if Script.is_hdp_stack_greater_or_equal("2.3"):
+    if Script.is_stack_greater_or_equal("2.3"):
       # phoenix uses hbase configs
       conf_select.select(params.stack_name, "hbase", params.version)
-      hdp_select.select("phoenix-server", params.version)
+      stack_select.select("phoenix-server", params.version)


   def status(self, env):

+ 1 - 1
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py

@@ -51,5 +51,5 @@ else:

   hbase_conf_dir = "/etc/hbase/conf"
   limits_conf_dir = "/etc/security/limits.d"
-  if Script.is_hdp_stack_greater_or_equal("2.2"):
+  if Script.is_stack_greater_or_equal("2.2"):
     hbase_conf_dir = format("/usr/hdp/current/{component_directory}/conf")

+ 4 - 4
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py

@@ -22,16 +22,16 @@ from resource_management import *
 from resource_management.core.resources.system import Execute
 from resource_management.core import shell
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.decorator import retry

 def prestart(env, hdp_component):
   import params

-  if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+  if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
     conf_select.select(params.stack_name, "hbase", params.version)
-    hdp_select.select(hdp_component, params.version)
+    stack_select.select(hdp_component, params.version)

 def post_regionserver(env):
   import params

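prestart is the shared pre-restart hook for the HBASE component scripts; a hypothetical call site, with the surrounding class omitted and the component name purely illustrative:

import upgrade

def pre_upgrade_restart(self, env, upgrade_type=None):
  import params
  env.set_params(params)
  upgrade.prestart(env, "hbase-master")   # switches configs and binaries to params.version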
+ 4 - 4
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py

@@ -20,8 +20,8 @@ import datanode_upgrade
 from hdfs_datanode import datanode
 from resource_management import *
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, FILE_TYPE_XML
 from hdfs import hdfs
@@ -87,9 +87,9 @@ class DataNodeDefault(DataNode):
     Logger.info("Executing DataNode Stack Upgrade pre-restart")
     import params
     env.set_params(params)
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      hdp_select.select("hadoop-hdfs-datanode", params.version)
+      stack_select.select("hadoop-hdfs-datanode", params.version)

   def post_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing DataNode Stack Upgrade post-restart")

+ 3 - 3
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py

@@ -19,7 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
@@ -60,9 +60,9 @@ class HdfsClientDefault(HdfsClient):
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      hdp_select.select("hadoop-client", params.version)
+      stack_select.select("hadoop-client", params.version)
 
   def security_status(self, env):
     import status_params

+ 4 - 4
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py

@@ -19,9 +19,9 @@ limitations under the License.
 
 from resource_management import *
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.version import compare_versions, \
-  format_hdp_stack_version
+  format_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
@@ -50,9 +50,9 @@ class JournalNodeDefault(JournalNode):
     import params
     env.set_params(params)
 
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      hdp_select.select("hadoop-hdfs-journalnode", params.version)
+      stack_select.select("hadoop-hdfs-journalnode", params.version)
 
   def start(self, env, upgrade_type=None):
     import params

+ 4 - 4
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py

@@ -29,9 +29,9 @@ from resource_management import Script
 from resource_management.core.resources.system import Execute, File
 from resource_management.core import shell
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import Direction
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
@@ -190,14 +190,14 @@ class NameNodeDefault(NameNode):
     import params
     env.set_params(params)
 
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
       # When downgrading an Express Upgrade, the first thing we do is to revert the symlinks.
       # Therefore, we cannot call this code in that scenario.
       call_if = [("rolling", "upgrade"), ("rolling", "downgrade"), ("nonrolling", "upgrade")]
       for e in call_if:
         if (upgrade_type, params.upgrade_direction) == e:
           conf_select.select(params.stack_name, "hadoop", params.version)
-      hdp_select.select("hadoop-hdfs-namenode", params.version)
+      stack_select.select("hadoop-hdfs-namenode", params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade post-restart")

+ 4 - 4
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py

@@ -25,8 +25,8 @@ from resource_management.libraries.functions.security_commons import build_expec
 from hdfs_nfsgateway import nfsgateway
 from hdfs import hdfs
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 
 
 class NFSGateway(Script):
@@ -45,9 +45,9 @@ class NFSGateway(Script):
     import params
     env.set_params(params)
 
-    if Script.is_hdp_stack_greater_or_equal('2.3.0.0'):
+    if Script.is_stack_greater_or_equal('2.3.0.0'):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      hdp_select.select("hadoop-hdfs-nfs3", params.version)
+      stack_select.select("hadoop-hdfs-nfs3", params.version)
 
   def start(self, env, upgrade_type=None):
     import params

+ 10 - 10
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py

@@ -27,9 +27,9 @@ from ambari_commons.os_check import OSCheck
 from ambari_commons.str_utils import cbool, cint
 
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_klist_path
 from resource_management.libraries.functions import get_kinit_path
@@ -47,7 +47,7 @@ tmp_dir = Script.get_tmp_dir()
 stack_name = default("/hostLevelParams/stack_name", None)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
 agent_stack_retry_on_unavailability = cbool(config["hostLevelParams"]["agent_stack_retry_on_unavailability"])
 agent_stack_retry_count = cint(config["hostLevelParams"]["agent_stack_retry_count"])
 
@@ -77,17 +77,17 @@ secure_dn_ports_are_in_use = False
 
 # hadoop default parameters
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = hdp_select.get_hadoop_dir("libexec")
-hadoop_bin = hdp_select.get_hadoop_dir("sbin")
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
-hadoop_home = hdp_select.get_hadoop_dir("home")
+hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
+hadoop_bin = stack_select.get_hadoop_dir("sbin")
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
+hadoop_home = stack_select.get_hadoop_dir("home")
 hadoop_secure_dn_user = hdfs_user
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
-hadoop_lib_home = hdp_select.get_hadoop_dir("lib")
+hadoop_lib_home = stack_select.get_hadoop_dir("lib")
 
 # hadoop parameters for 2.2+
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
 
   if not security_enabled:
@@ -114,7 +114,7 @@ limits_conf_dir = "/etc/security/limits.d"
 hdfs_user_nofile_limit = default("/configurations/hadoop-env/hdfs_user_nofile_limit", "128000")
 hdfs_user_nproc_limit = default("/configurations/hadoop-env/hdfs_user_nproc_limit", "65536")
 
-create_lib_snappy_symlinks = not Script.is_hdp_stack_greater_or_equal("2.2")
+create_lib_snappy_symlinks = not Script.is_stack_greater_or_equal("2.2")
 jsvc_path = "/usr/lib/bigtop-utils"
 
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir

+ 4 - 4
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py

@@ -19,8 +19,8 @@ limitations under the License.
 
 from resource_management import *
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
@@ -71,9 +71,9 @@ class SNameNodeDefault(SNameNode):
     import params
     env.set_params(params)
 
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      hdp_select.select("hadoop-hdfs-secondarynamenode", params.version)
+      stack_select.select("hadoop-hdfs-secondarynamenode", params.version)
 
   def security_status(self, env):
     import status_params

+ 3 - 3
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py

@@ -224,12 +224,12 @@ def service(action=None, name=None, user=None, options="", create_pid_dir=False,
     hadoop_secure_dn_pid_file = format("{hadoop_secure_dn_pid_dir}/hadoop_secure_dn.pid")
 
     # At Champlain stack and further, we may start datanode as a non-root even in secure cluster
-    if not (params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0) or params.secure_dn_ports_are_in_use:
+    if not (params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.2') >= 0) or params.secure_dn_ports_are_in_use:
       user = "root"
       pid_file = format(
         "{hadoop_pid_dir_prefix}/{hdfs_user}/hadoop-{hdfs_user}-{name}.pid")
 
-    if action == 'stop' and (params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0) and \
+    if action == 'stop' and (params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.2') >= 0) and \
       os.path.isfile(hadoop_secure_dn_pid_file):
         # We need special handling for this case to handle the situation
         # when we configure non-root secure DN and then restart it
@@ -354,7 +354,7 @@ def get_hdfs_binary(distro_component_name):
   if params.stack_name == "HDP":
     # This was used in HDP 2.1 and earlier
     hdfs_binary = "hdfs"
-    if Script.is_hdp_stack_greater_or_equal("2.2"):
+    if Script.is_stack_greater_or_equal("2.2"):
       hdfs_binary = "/usr/hdp/current/{0}/bin/hdfs".format(distro_component_name)
 
   return hdfs_binary

+ 2 - 2
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py

@@ -23,7 +23,7 @@ from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyImpl
 from resource_management.core.logger import Logger
 from resource_management.core.exceptions import ClientComponentHasNoStatus
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.script.script import Script
 
@@ -78,7 +78,7 @@ class HCatClientDefault(HCatClient):
     # HCat client doesn't have a first-class entry in hdp-select. Since clients always
     # update after daemons, this ensures that the hcat directories are correct on hosts
     # which do not include the WebHCat daemon
-    hdp_select.select("hive-webhcat", params.version)
+    stack_select.select("hive-webhcat", params.version)
 
 
 if __name__ == "__main__":

+ 2 - 2
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py

@@ -107,7 +107,7 @@ def hive(name=None):
 
   if name == 'hiveserver2':
     # HDP 2.1.* or lower
-    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, "2.2.0.0") < 0:
+    if params.stack_version_formatted_major != "" and compare_versions(params.stack_version_formatted_major, "2.2.0.0") < 0:
       params.HdfsResource(params.webhcat_apps_dir,
                             type="directory",
                             action="create_on_execute",
@@ -134,7 +134,7 @@ def hive(name=None):
     # ****** Begin Copy Tarballs ******
     # *********************************
     # HDP 2.2 or higher, copy mapreduce.tar.gz to HDFS
-    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >= 0:
+    if params.stack_version_formatted_major != "" and compare_versions(params.stack_version_formatted_major, '2.2') >= 0:
       copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
       copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
 

+ 3 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py

@@ -20,7 +20,7 @@ limitations under the License.
 import sys
 from resource_management import *
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from hive import hive
 from ambari_commons.os_family_impl import OsFamilyImpl
 from ambari_commons import OSConst
@@ -55,10 +55,10 @@ class HiveClientDefault(HiveClient):
 
     import params
     env.set_params(params)
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hive", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      hdp_select.select("hadoop-client", params.version)
+      stack_select.select("hadoop-client", params.version)
 
 
 if __name__ == "__main__":

+ 6 - 6
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py

@@ -23,10 +23,10 @@ from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Execute, Directory
 from resource_management.libraries.script import Script
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.functions.security_commons import build_expectations
 from resource_management.libraries.functions.security_commons import cached_kinit_executor
@@ -102,15 +102,15 @@ class HiveMetastoreDefault(HiveMetastore):
 
     env.set_params(params)
 
-    is_stack_hdp_23 = Script.is_hdp_stack_greater_or_equal("2.3")
+    is_stack_hdp_23 = Script.is_stack_greater_or_equal("2.3")
     is_upgrade = params.upgrade_direction == Direction.UPGRADE
 
     if is_stack_hdp_23 and is_upgrade:
       self.upgrade_schema(env)
 
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hive", params.version)
-      hdp_select.select("hive-metastore", params.version)
+      stack_select.select("hive-metastore", params.version)
 
 
   def security_status(self, env):
@@ -229,7 +229,7 @@ class HiveMetastoreDefault(HiveMetastore):
     # we need to choose the original legacy location
     schematool_hive_server_conf_dir = params.hive_server_conf_dir
     if params.current_version is not None:
-      current_version = format_hdp_stack_version(params.current_version)
+      current_version = format_stack_version(params.current_version)
       if compare_versions(current_version, "2.3") < 0:
         schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF
 

+ 5 - 5
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py

@@ -22,12 +22,12 @@ limitations under the License.
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
-from resource_management.libraries.functions.get_hdp_version import get_hdp_version
+from resource_management.libraries.functions.get_stack_version import get_stack_version
 from resource_management.libraries.functions.check_process_status import check_process_status
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
@@ -117,9 +117,9 @@ class HiveServerDefault(HiveServer):
     import params
     env.set_params(params)
 
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hive", params.version)
-      hdp_select.select("hive-server2", params.version)
+      stack_select.select("hive-server2", params.version)
 
       # Copy mapreduce.tar.gz and tez.tar.gz to HDFS
       resource_created = copy_to_hdfs(

+ 3 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py

@@ -22,12 +22,12 @@ limitations under the License.
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
-from resource_management.libraries.functions.get_hdp_version import get_hdp_version
+from resource_management.libraries.functions.get_stack_version import get_stack_version
 from resource_management.libraries.functions.check_process_status import check_process_status
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.security_commons import build_expectations, \
     cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
     FILE_TYPE_XML

+ 8 - 8
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py

@@ -24,8 +24,8 @@ from resource_management.core.exceptions import Fail
 from resource_management.core.resources.system import Execute
 from resource_management.core import shell
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.version import compare_versions
 
 
@@ -74,24 +74,24 @@ def post_upgrade_deregister():
   Execute(command, user=params.hive_user, path=hive_execute_path, tries=1 )
 
 
-def _get_hive_execute_path(hdp_stack_version):
+def _get_hive_execute_path(stack_version_formatted):
   """
   Returns the exact execute path to use for the given stack-version.
   This method does not return the "current" path
-  :param hdp_stack_version: Exact stack-version to use in the new path
+  :param stack_version_formatted: Exact stack-version to use in the new path
   :return: Hive execute path for the exact hdp stack-version
   """
   import params
 
   hive_execute_path = params.execute_path
-  formatted_stack_version = format_hdp_stack_version(hdp_stack_version)
+  formatted_stack_version = format_stack_version(stack_version_formatted)
   if formatted_stack_version and compare_versions(formatted_stack_version, "2.2") >= 0:
     # hive_bin
-    new_hive_bin = format('/usr/hdp/{hdp_stack_version}/hive/bin')
+    new_hive_bin = format('/usr/hdp/{stack_version_formatted}/hive/bin')
     if (os.pathsep + params.hive_bin) in hive_execute_path:
       hive_execute_path = hive_execute_path.replace(os.pathsep + params.hive_bin, os.pathsep + new_hive_bin)
     # hadoop_bin_dir
-    new_hadoop_bin = hdp_select.get_hadoop_dir_for_stack_version("bin", hdp_stack_version)
+    new_hadoop_bin = stack_select.get_hadoop_dir_for_stack_version("bin", stack_version_formatted)
    old_hadoop_bin = params.hadoop_bin_dir
     if new_hadoop_bin and len(new_hadoop_bin) > 0 and (os.pathsep + old_hadoop_bin) in hive_execute_path:
       hive_execute_path = hive_execute_path.replace(os.pathsep + old_hadoop_bin, os.pathsep + new_hadoop_bin)
@@ -117,7 +117,7 @@ def _get_current_hiveserver_version():
       source_version = params.current_version
     hive_execute_path = _get_hive_execute_path(source_version)
     version_hive_bin = params.hive_bin
-    formatted_source_version = format_hdp_stack_version(source_version)
+    formatted_source_version = format_stack_version(source_version)
     if formatted_source_version and compare_versions(formatted_source_version, "2.2") >= 0:
       version_hive_bin = format('/usr/hdp/{source_version}/hive/bin')
     command = format('{version_hive_bin}/hive --version')

+ 8 - 8
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py

@@ -32,7 +32,7 @@ from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.is_empty import is_empty
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.copy_tarball import STACK_VERSION_PATTERN
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
@@ -53,11 +53,11 @@ hostname = config["hostname"]
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
-stack_is_hdp21 = Script.is_hdp_stack_less_than("2.2")
+stack_version_formatted_major = format_stack_version(stack_version_unformatted)
+stack_is_hdp21 = Script.is_stack_less_than("2.2")
 
 # this is not available on INSTALL action because hdp-select is not available
-hdp_stack_version = functions.get_hdp_version('hive-server2')
+stack_version_formatted = functions.get_stack_version('hive-server2')
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade.
 # It cannot be used during the initial Cluser Install because the version is not yet known.
@@ -109,7 +109,7 @@ webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
 
 # Starting from HDP2.3 drop should be executed with purge suffix
 purge_tables = "false"
-if Script.is_hdp_stack_greater_or_equal("2.3"):
+if Script.is_stack_greater_or_equal("2.3"):
   purge_tables = 'true'
 
   # this is NOT a typo.  HDP-2.3 configs for hcatalog/webhcat point to a
@@ -117,7 +117,7 @@ if Script.is_hdp_stack_greater_or_equal("2.3"):
   hcat_conf_dir = '/usr/hdp/current/hive-webhcat/etc/hcatalog'
   config_dir = '/usr/hdp/current/hive-webhcat/etc/webhcat'
 
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
   hive_specific_configs_supported = True
 
   component_directory = status_params.component_directory
@@ -287,7 +287,7 @@ target = format("{hive_lib}/{jdbc_jar_name}")
 jars_in_hive_lib = format("{hive_lib}/*.jar")
 
 
-if Script.is_hdp_stack_less_than("2.2"):
+if Script.is_stack_less_than("2.2"):
   source_jdbc_file = target
 else:
   # normally, the JDBC driver would be referenced by /usr/hdp/current/.../foo.jar
@@ -304,7 +304,7 @@ start_metastore_path = format("{tmp_dir}/start_metastore_script")
 hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
 
 if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
-  if Script.is_hdp_stack_less_than("2.2"):
+  if Script.is_stack_less_than("2.2"):
     hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
   else:
     hive_heapsize = config['configurations']['hive-env']['hive.heapsize']

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py

@@ -26,7 +26,7 @@ config = Script.get_config()
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
 
 hdp_root = None
 hive_conf_dir = None

+ 4 - 4
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py

@@ -21,7 +21,7 @@ limitations under the License.
 from ambari_commons import OSCheck
 
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
@@ -72,7 +72,7 @@ else:
 
   # default configuration directories
   hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-  hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
+  hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
   webhcat_conf_dir = '/etc/hive-webhcat/conf'
   hive_etc_dir_prefix = "/etc/hive"
   hive_conf_dir = "/etc/hive/conf"
@@ -82,13 +82,13 @@ else:
   hive_server_conf_dir = "/etc/hive/conf.server"
 
   # HDP 2.2+
-  if Script.is_hdp_stack_greater_or_equal("2.2"):
+  if Script.is_stack_greater_or_equal("2.2"):
     webhcat_conf_dir = '/usr/hdp/current/hive-webhcat/conf'
     hive_conf_dir = format("/usr/hdp/current/{component_directory}/conf")
     hive_client_conf_dir = format("/usr/hdp/current/{component_directory}/conf")
 
   # HDP 2.3+
-  if Script.is_hdp_stack_greater_or_equal("2.3"):
+  if Script.is_stack_greater_or_equal("2.3"):
     # ranger is only compatible with this location on HDP 2.3+, not HDP 2.2
     hive_server_conf_dir = format("/usr/hdp/current/{component_directory}/conf/conf.server")
 

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py

@@ -90,7 +90,7 @@ def webhcat():
             )
 
   # if we're in an upgrade of a secure cluster, make sure hive-site and yarn-site are created
-  if Script.is_hdp_stack_greater_or_equal("2.3") and params.version:
+  if Script.is_stack_greater_or_equal("2.3") and params.version:
     XmlConfig("hive-site.xml",
       conf_dir = format("/usr/hdp/{version}/hive/conf"),
      configurations = params.config['configurations']['hive-site'],

+ 3 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py

@@ -20,7 +20,7 @@ Ambari Agent
 """
 """
 from resource_management import *
 from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.security_commons import build_expectations, \
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
   FILE_TYPE_XML
@@ -75,11 +75,11 @@ class WebHCatServerDefault(WebHCatServer):
     import params
     import params
     env.set_params(params)
     env.set_params(params)
 
 
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
       # webhcat has no conf, but uses hadoop home, so verify that regular hadoop conf is set
       # webhcat has no conf, but uses hadoop home, so verify that regular hadoop conf is set
       conf_select.select(params.stack_name, "hive-hcatalog", params.version)
       conf_select.select(params.stack_name, "hive-hcatalog", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      hdp_select.select("hive-webhcat", params.version)
+      stack_select.select("hive-webhcat", params.version)
 
 
   def security_status(self, env):
   def security_status(self, env):
     import status_params
     import status_params

+ 2 - 2
ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka.py

@@ -20,7 +20,7 @@ limitations under the License.
 import collections
 import os
 
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.version import format_stack_version, compare_versions
 from resource_management.libraries.resources.properties_file import PropertiesFile
 from resource_management.libraries.resources.template_config import TemplateConfig
 from resource_management.core.resources.system import Directory, Execute, File, Link
@@ -39,7 +39,7 @@ def kafka(upgrade_type=None):
     # This still has an issue of hostnames being alphabetically out-of-order for broker.id in HDP-2.2.
     # Starting in HDP 2.3, Kafka handles the generation of broker.id so Ambari doesn't have to.
 
-    effective_version = params.hdp_stack_version if upgrade_type is None else format_hdp_stack_version(params.version)
+    effective_version = params.stack_version_formatted if upgrade_type is None else format_stack_version(params.version)
     Logger.info(format("Effective stack version: {effective_version}"))
 
     if effective_version is not None and effective_version != "" and compare_versions(effective_version, '2.2.0.0') >= 0 and compare_versions(effective_version, '2.3.0.0') < 0:

+ 9 - 9
ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py

@@ -20,9 +20,9 @@ from resource_management import Script
 from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Execute, File, Directory
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import Direction
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.check_process_status import check_process_status
 from kafka import ensure_base_directories
@@ -48,22 +48,22 @@ class KafkaBroker(Script):
     import params
     env.set_params(params)
 
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
-      hdp_select.select("kafka-broker", params.version)
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
+      stack_select.select("kafka-broker", params.version)
 
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.3.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.3.0.0') >= 0:
       conf_select.select(params.stack_name, "kafka", params.version)
 
     # This is extremely important since it should only be called if crossing the HDP 2.3.4.0 boundary.
     if params.current_version and params.version and params.upgrade_direction:
       src_version = dst_version = None
       if params.upgrade_direction == Direction.UPGRADE:
-        src_version = format_hdp_stack_version(params.current_version)
-        dst_version = format_hdp_stack_version(params.version)
+        src_version = format_stack_version(params.current_version)
+        dst_version = format_stack_version(params.version)
       else:
         # These represent the original values during the UPGRADE direction
-        src_version = format_hdp_stack_version(params.version)
-        dst_version = format_hdp_stack_version(params.downgrade_from_version)
+        src_version = format_stack_version(params.version)
+        dst_version = format_stack_version(params.downgrade_from_version)
 
       if compare_versions(src_version, '2.3.4.0') < 0 and compare_versions(dst_version, '2.3.4.0') >= 0:
         # Calling the acl migration script requires the configs to be present.

+ 8 - 8
ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py

@@ -19,15 +19,15 @@ limitations under the License.
 """
 """
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import format
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.version import format_stack_version, compare_versions
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.default import default
 from utils import get_bare_principal
 from utils import get_bare_principal
-from resource_management.libraries.functions.get_hdp_version import get_hdp_version
+from resource_management.libraries.functions.get_stack_version import get_stack_version
 from resource_management.libraries.functions.is_empty import is_empty
 from resource_management.libraries.functions.is_empty import is_empty
 import status_params
 import status_params
 from resource_management.core.logger import Logger
 from resource_management.core.logger import Logger
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_kinit_path
 
 
@@ -47,7 +47,7 @@ current_version = default("/hostLevelParams/current_version", None)
 host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 
 
 # When downgrading the 'version' and 'current_version' are both pointing to the downgrade-target version
 # When downgrading the 'version' and 'current_version' are both pointing to the downgrade-target version
@@ -69,7 +69,7 @@ kafka_user_nofile_limit = config['configurations']['kafka-env']['kafka_user_nofi
 kafka_user_nproc_limit = config['configurations']['kafka-env']['kafka_user_nproc_limit']
 kafka_user_nproc_limit = config['configurations']['kafka-env']['kafka_user_nproc_limit']
 
 
 # parameters for 2.2+
 # parameters for 2.2+
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
   kafka_home = '/usr/hdp/current/kafka-broker/'
   kafka_home = '/usr/hdp/current/kafka-broker/'
   kafka_bin = kafka_home+'bin/kafka'
   kafka_bin = kafka_home+'bin/kafka'
   conf_dir = "/usr/hdp/current/kafka-broker/config"
   conf_dir = "/usr/hdp/current/kafka-broker/config"
@@ -139,7 +139,7 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 kafka_kerberos_enabled = ('security.inter.broker.protocol' in config['configurations']['kafka-broker'] and
 kafka_kerberos_enabled = ('security.inter.broker.protocol' in config['configurations']['kafka-broker'] and
                           config['configurations']['kafka-broker']['security.inter.broker.protocol'] == "PLAINTEXTSASL")
                           config['configurations']['kafka-broker']['security.inter.broker.protocol'] == "PLAINTEXTSASL")
 
 
-if security_enabled and hdp_stack_version != "" and 'kafka_principal_name' in config['configurations']['kafka-env'] and compare_versions(hdp_stack_version, '2.3') >= 0:
+if security_enabled and stack_version_formatted != "" and 'kafka_principal_name' in config['configurations']['kafka-env'] and compare_versions(stack_version_formatted, '2.3') >= 0:
     _hostname_lowercase = config['hostname'].lower()
     _hostname_lowercase = config['hostname'].lower()
     _kafka_principal_name = config['configurations']['kafka-env']['kafka_principal_name']
     _kafka_principal_name = config['configurations']['kafka-env']['kafka_principal_name']
     kafka_jaas_principal = _kafka_principal_name.replace('_HOST',_hostname_lowercase)
     kafka_jaas_principal = _kafka_principal_name.replace('_HOST',_hostname_lowercase)
@@ -248,7 +248,7 @@ if has_ranger_admin and is_supported_kafka_ranger:
   ssl_truststore_password = unicode(config['configurations']['ranger-kafka-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password']) if xml_configurations_supported else None
   ssl_truststore_password = unicode(config['configurations']['ranger-kafka-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password']) if xml_configurations_supported else None
   credential_file = format('/etc/ranger/{repo_name}/cred.jceks') if xml_configurations_supported else None
   credential_file = format('/etc/ranger/{repo_name}/cred.jceks') if xml_configurations_supported else None
 
 
-  hdp_version = get_hdp_version('kafka-broker')
+  hdp_version = get_stack_version('kafka-broker')
   setup_ranger_env_sh_source = format('/usr/hdp/{hdp_version}/ranger-kafka-plugin/install/conf.templates/enable/kafka-ranger-env.sh')
   setup_ranger_env_sh_source = format('/usr/hdp/{hdp_version}/ranger-kafka-plugin/install/conf.templates/enable/kafka-ranger-env.sh')
   setup_ranger_env_sh_target = format("{conf_dir}/kafka-ranger-env.sh")
   setup_ranger_env_sh_target = format("{conf_dir}/kafka-ranger-env.sh")
 
 
@@ -264,7 +264,7 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] if
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None
 hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
 hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
 default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
 default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin") if has_namenode else None
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 
 

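The rename in this commit is mechanical: each hdp-prefixed helper has a stack-named drop-in replacement with the same call shape. A minimal sketch of the new call sites, using only API names visible in the hunks (all values illustrative, not taken from any one file):

# Sketch only: old name -> new name, same signatures as used in the hunks.
from resource_management.libraries.functions.version import format_stack_version, compare_versions
from resource_management.libraries.functions.get_stack_version import get_stack_version
from resource_management.libraries.functions import stack_select, conf_select

stack_version_unformatted = "2.3"                                          # hypothetical input
stack_version_formatted = format_stack_version(stack_version_unformatted)  # was format_hdp_stack_version

if stack_version_formatted != "" and compare_versions(stack_version_formatted, '2.3') >= 0:
    hadoop_bin_dir = stack_select.get_hadoop_dir("bin")   # was hdp_select.get_hadoop_dir
    hadoop_conf_dir = conf_select.get_hadoop_conf_dir()   # unchanged
    kafka_version = get_stack_version('kafka-broker')     # was get_hdp_version
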
+ 2 - 2
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox.py

@@ -66,7 +66,7 @@ def knox():
     content=InlineTemplate(params.admin_topology_template)
  )

-  if Script.is_hdp_stack_greater_or_equal_to(params.version_formatted, "2.3.8.0"):
+  if Script.is_stack_greater_or_equal_to(params.version_formatted, "2.3.8.0"):
      File(os.path.join(params.knox_conf_dir, "topologies", "knoxsso.xml"),
         group=params.knox_group,
         owner=params.knox_user,
@@ -123,7 +123,7 @@ def knox():
         content=InlineTemplate(params.admin_topology_template)
    )

-    if Script.is_hdp_stack_greater_or_equal_to(params.version_formatted, "2.3.8.0"):
+    if Script.is_stack_greater_or_equal_to(params.version_formatted, "2.3.8.0"):
        File(os.path.join(params.knox_conf_dir, "topologies", "knoxsso.xml"),
            group=params.knox_group,
            owner=params.knox_user,

+ 5 - 5
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py

@@ -22,12 +22,12 @@ import tarfile

from resource_management.libraries.script.script import Script
from resource_management.libraries.functions import conf_select, tar_archive
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.check_process_status import check_process_status
from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import Direction
from resource_management.libraries.functions.security_commons import build_expectations, \
  cached_kinit_executor, validate_security_config_properties, get_params_from_filesystem, \
@@ -112,7 +112,7 @@ class KnoxGatewayDefault(KnoxGateway):
  def pre_upgrade_restart(self, env, upgrade_type=None):
    import params
    env.set_params(params)
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:

      absolute_backup_dir = None
      if params.upgrade_direction and params.upgrade_direction == Direction.UPGRADE:
@@ -123,7 +123,7 @@ class KnoxGatewayDefault(KnoxGateway):

      # conf-select will change the symlink to the conf folder.
      conf_select.select(params.stack_name, "knox", params.version)
-      hdp_select.select("knox-server", params.version)
+      stack_select.select("knox-server", params.version)

      # Extract the tar of the old conf folder into the new conf directory
      if absolute_backup_dir is not None and params.upgrade_direction and params.upgrade_direction == Direction.UPGRADE:

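The pre_upgrade_restart hook above is the shape that repeats for every component in this commit: gate on the formatted target version, then repoint the conf and binary symlinks. Condensed as a sketch (assuming params supplies stack_name and version as in the hunk):

# Sketch of the recurring hook; not a verbatim copy of any single file.
from resource_management.libraries.functions import conf_select, stack_select
from resource_management.libraries.functions.version import compare_versions, format_stack_version

def pre_upgrade_restart(self, env, upgrade_type=None):
    import params
    env.set_params(params)
    # Only act when the target stack is 2.2.0.0 or newer.
    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
        conf_select.select(params.stack_name, "knox", params.version)   # repoints the conf symlink
        stack_select.select("knox-server", params.version)              # repoints the binary symlink
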
+ 9 - 9
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py

@@ -22,15 +22,15 @@ from resource_management.core.logger import Logger

import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set.
from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.get_port_from_url import get_port_from_url
-from resource_management.libraries.functions.get_hdp_version import get_hdp_version
+from resource_management.libraries.functions.get_stack_version import get_stack_version
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.script.script import Script
from status_params import *
from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import conf_select

# server configurations
@@ -41,11 +41,11 @@ stack_name = default("/hostLevelParams/stack_name", None)
upgrade_direction = default("/commandParams/upgrade_direction", None)
version = default("/commandParams/version", None)
# E.g., 2.3.2.0
-version_formatted = format_hdp_stack_version(version)
+version_formatted = format_stack_version(version)

# E.g., 2.3
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)

# This is the version whose state is CURRENT. During an RU, this is the source version.
# DO NOT format it since we need the build number too.
@@ -59,7 +59,7 @@ knox_data_dir = '/var/lib/knox/data'
# Important, it has to be strictly greater than 2.3.0.0!!!
if stack_name and stack_name.upper() == "HDP":
  Logger.info(format("HDP version to use is {version_formatted}"))
-  if Script.is_hdp_stack_greater(version_formatted, "2.3.0.0"):
+  if Script.is_stack_greater(version_formatted, "2.3.0.0"):
    # This is the current version. In the case of a Rolling Upgrade, it will be the newer version.
    # In the case of a Downgrade, it will be the version downgrading to.
    # This is always going to be a symlink to /var/lib/knox/data_${version}
@@ -82,7 +82,7 @@ ldap_bin = '/usr/lib/knox/bin/ldap.sh'
knox_client_bin = '/usr/lib/knox/bin/knoxcli.sh'

# HDP 2.2+ parameters
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
  knox_bin = '/usr/hdp/current/knox-server/bin/gateway.sh'
  knox_conf_dir = '/usr/hdp/current/knox-server/conf'
  ldap_bin = '/usr/hdp/current/knox-server/bin/ldap.sh'
@@ -96,7 +96,7 @@ knox_group = default("/configurations/knox-env/knox_group", "knox")
mode = 0644

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)

dfs_ha_enabled = False
dfs_ha_nameservices = default("/configurations/hdfs-site/dfs.nameservices", None)
@@ -334,7 +334,7 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] if
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None
hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin") if has_namenode else None
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None

import functools

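Two different version strings flow through this params file, and the rename makes the distinction easier to see: version_formatted comes from the command (the upgrade target, e.g. 2.3.2.0), while stack_version_formatted comes from the cluster's declared stack (e.g. 2.3). A sketch of the distinction, with both input values illustrative:

from resource_management.libraries.functions.version import format_stack_version

version = "2.3.2.0"                 # hypothetical /commandParams/version; None outside upgrades
stack_version_unformatted = "2.3"   # hypothetical hostLevelParams stack_version

version_formatted = format_stack_version(version)                          # gates upgrade-only logic
stack_version_formatted = format_stack_version(stack_version_unformatted)  # gates stack-level defaults
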
+ 1 - 1
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/status_params.py

@@ -30,7 +30,7 @@ if OSCheck.is_windows_family():
  knox_ldap_win_service_name = "ldap"
else:
  knox_conf_dir = '/etc/knox/conf'
-  if Script.is_hdp_stack_greater_or_equal("2.2"):
+  if Script.is_stack_greater_or_equal("2.2"):
    knox_conf_dir = '/usr/hdp/current/knox-server/conf'
  knox_pid_dir = config['configurations']['knox-env']['knox_pid_dir']
  knox_pid_file = format("{knox_pid_dir}/gateway.pid")

+ 2 - 2
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/upgrade.py

@@ -27,7 +27,7 @@ from resource_management.core.exceptions import Fail
from resource_management.libraries.functions import tar_archive
from resource_management.libraries.functions import format
from resource_management.libraries.functions import Direction
-from resource_management.libraries.functions.version import compare_versions,format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions,format_stack_version


BACKUP_TEMP_DIR = "knox-upgrade-backup"
@@ -82,7 +82,7 @@ def _get_directory_mappings_during_upgrade():
  knox_data_dir = '/var/lib/knox/data'

  if params.stack_name and params.stack_name.upper() == "HDP" and \
-          compare_versions(format_hdp_stack_version(params.upgrade_from_version), "2.3.0.0") > 0:
+          compare_versions(format_stack_version(params.upgrade_from_version), "2.3.0.0") > 0:
    # Use the version that is being upgraded from.
    knox_data_dir = format('/usr/hdp/{upgrade_from_version}/knox/data')


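compare_versions is always fed through format_stack_version first so that short versions like "2.3" compare cleanly against fully qualified ones like "2.3.0.0". A rough, hypothetical equivalent for intuition only (this is not the library's implementation):

def compare_versions_sketch(v1, v2):
    # Zero-pad both versions to the same number of segments, then
    # compare them segment by segment, numerically.
    a, b = [int(x) for x in v1.split('.')], [int(x) for x in v2.split('.')]
    n = max(len(a), len(b))
    a, b = a + [0] * (n - len(a)), b + [0] * (n - len(b))
    return (a > b) - (a < b)   # -1, 0 or 1, like the old cmp()

assert compare_versions_sketch("2.3.0.0", "2.2") > 0
assert compare_versions_sketch("2.3", "2.3.0.0") == 0
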
+ 2 - 2
ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py

@@ -20,7 +20,7 @@ Ambari Agent
 """
 """
 from resource_management.core.logger import Logger
 from resource_management.core.logger import Logger
 from resource_management.core.exceptions import ClientComponentHasNoStatus
 from resource_management.core.exceptions import ClientComponentHasNoStatus
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.script import Script
 from resource_management.libraries.script import Script
 from mahout import mahout
 from mahout import mahout
@@ -38,7 +38,7 @@ class MahoutClient(Script):
     env.set_params(params)
     env.set_params(params)
 
 
     conf_select.select(params.stack_name, "mahout", params.version)
     conf_select.select(params.stack_name, "mahout", params.version)
-    hdp_select.select("mahout-client", params.version )
+    stack_select.select("mahout-client", params.version )
 
 
 
 
   def install(self, env):
   def install(self, env):

+ 5 - 5
ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py

@@ -20,9 +20,9 @@ Ambari Agent
 """
 """
 from resource_management import *
 from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.script.script import Script
@@ -35,7 +35,7 @@ stack_name = default("/hostLevelParams/stack_name", None)
 host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
 
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
 version = default("/commandParams/version", None)
@@ -48,8 +48,8 @@ mahout_user = config['configurations']['mahout-env']['mahout_user']
 yarn_log_dir_prefix = config['configurations']['yarn-env']['yarn_log_dir_prefix']
 yarn_log_dir_prefix = config['configurations']['yarn-env']['yarn_log_dir_prefix']
 
 
 #hadoop params
 #hadoop params
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
-hadoop_home = hdp_select.get_hadoop_dir("home")
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
+hadoop_home = stack_select.get_hadoop_dir("home")
 
 
 # the configuration direction for HDFS/YARN/MapR is the hadoop config
 # the configuration direction for HDFS/YARN/MapR is the hadoop config
 # directory, which is symlinked by hadoop-client only
 # directory, which is symlinked by hadoop-client only

+ 2 - 2
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py

@@ -146,7 +146,7 @@ def oozie(is_server=False):
      owner=params.oozie_user
    )

-  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
+  if params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.2') >= 0:
    File(format("{params.conf_dir}/adminusers.txt"),
      mode=0644,
      group=params.user_group,
@@ -318,7 +318,7 @@ def oozie_server_specific():
       mode = 0644,
  )

-  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
+  if params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.2') >= 0:
    # Create hive-site and tez-site configs for oozie
    Directory(params.hive_conf_dir,
        create_parents = True,

+ 3 - 3
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py

@@ -21,7 +21,7 @@ limitations under the License.
import sys
from resource_management import *
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select

from oozie import oozie
from oozie_service import oozie_service
@@ -53,12 +53,12 @@ class OozieClient(Script):

    # this function should not execute if the version can't be determined or
    # is not at least HDP 2.2.0.0
-    if not params.version or compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') < 0:
+    if not params.version or compare_versions(format_stack_version(params.version), '2.2.0.0') < 0:
      return

    Logger.info("Executing Oozie Client Stack Upgrade pre-restart")
    conf_select.select(params.stack_name, "oozie", params.version)
-    hdp_select.select("oozie-client", params.version)
+    stack_select.select("oozie-client", params.version)

  # We substitute some configs (oozie.authentication.kerberos.principal) before generation (see oozie.py and params.py).
  # This function returns changed configs (it's used for config generation before config download)

+ 9 - 9
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py

@@ -22,8 +22,8 @@ from resource_management.core import Logger
from resource_management.libraries.script import Script
from resource_management.libraries.functions import compare_versions
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions import format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import format_stack_version
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions import default
from resource_management.libraries.functions.constants import Direction
@@ -65,17 +65,17 @@ class OozieServer(Script):

    if upgrade_type is not None and params.upgrade_direction == Direction.UPGRADE and params.version is not None:
      Logger.info(format("Configuring Oozie during upgrade type: {upgrade_type}, direction: {params.upgrade_direction}, and version {params.version}"))
-      if compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+      if compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
        # In order for the "/usr/hdp/current/oozie-<client/server>" point to the new version of
        # oozie, we need to create the symlinks both for server and client.
        # This is required as both need to be pointing to new installed oozie version.

        # Sets the symlink : eg: /usr/hdp/current/oozie-client -> /usr/hdp/2.3.x.y-<version>/oozie
-        hdp_select.select("oozie-client", params.version)
+        stack_select.select("oozie-client", params.version)
        # Sets the symlink : eg: /usr/hdp/current/oozie-server -> /usr/hdp/2.3.x.y-<version>/oozie
-        hdp_select.select("oozie-server", params.version)
+        stack_select.select("oozie-server", params.version)

-      if compare_versions(format_hdp_stack_version(params.version), '2.3.0.0') >= 0:
+      if compare_versions(format_stack_version(params.version), '2.3.0.0') >= 0:
        conf_select.select(params.stack_name, "oozie", params.version)

    env.set_params(params)
@@ -187,16 +187,16 @@ class OozieServerDefault(OozieServer):

    # this function should not execute if the version can't be determined or
    # is not at least HDP 2.2.0.0
-    if not params.version or compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') < 0:
+    if not params.version or compare_versions(format_stack_version(params.version), '2.2.0.0') < 0:
      return

    Logger.info("Executing Oozie Server Stack Upgrade pre-restart")

    OozieUpgrade.backup_configuration()

-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "oozie", params.version)
-      hdp_select.select("oozie-server", params.version)
+      stack_select.select("oozie-server", params.version)

    OozieUpgrade.restore_configuration()
    OozieUpgrade.prepare_libext_directory()

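As the inline comments in this hunk say, select() repoints the /usr/hdp/current symlinks for a component at a concrete versioned install. Conceptually it does something like the sketch below (illustration only; the real stack_select.select shells out to the distribution's select tool and knows each component's package directory):

import os

def select_sketch(component, package_dir, version, root="/usr/hdp"):
    # e.g. select_sketch("oozie-server", "oozie", "2.3.x.y-1234") gives
    #   /usr/hdp/current/oozie-server -> /usr/hdp/2.3.x.y-1234/oozie
    link = os.path.join(root, "current", component)
    target = os.path.join(root, version, package_dir)
    if os.path.islink(link):
        os.remove(link)          # drop the old symlink
    os.symlink(target, link)     # point 'current' at the new version
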
+ 6 - 6
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py

@@ -30,8 +30,8 @@ from resource_management.core.resources.system import File
from resource_management.libraries.functions import Direction
from resource_management.libraries.functions import format
from resource_management.libraries.functions import compare_versions
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions import format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import format_stack_version
from resource_management.libraries.functions import tar_archive
from resource_management.libraries.script.script import Script

@@ -108,7 +108,7 @@ class OozieUpgrade(Script):

    # some versions of HDP don't need the lzo compression libraries
    target_version_needs_compression_libraries = compare_versions(
-      format_hdp_stack_version(params.version), '2.2.1.0') >= 0
+      format_stack_version(params.version), '2.2.1.0') >= 0

    # ensure the directory exists
    Directory(params.oozie_libext_dir, mode = 0777)
@@ -162,7 +162,7 @@ class OozieUpgrade(Script):
    oozie.download_database_library_if_needed()

    # get the upgrade version in the event that it's needed
-    upgrade_stack = hdp_select._get_upgrade_stack()
+    upgrade_stack = stack_select._get_upgrade_stack()
    if upgrade_stack is None or len(upgrade_stack) < 2 or upgrade_stack[1] is None:
      raise Fail("Unable to determine the stack that is being upgraded to or downgraded to.")

@@ -226,7 +226,7 @@ class OozieUpgrade(Script):
      command = format("{kinit_path_local} -kt {oozie_keytab} {oozie_principal_with_host}")
      Execute(command, user=params.oozie_user, logoutput=True)

-    upgrade_stack = hdp_select._get_upgrade_stack()
+    upgrade_stack = stack_select._get_upgrade_stack()
    if upgrade_stack is None or len(upgrade_stack) < 2 or upgrade_stack[1] is None:
      raise Fail("Unable to determine the stack that is being upgraded to or downgraded to.")

@@ -278,7 +278,7 @@ class OozieUpgrade(Script):

    params.HdfsResource(None, action = "execute")

-    upgrade_stack = hdp_select._get_upgrade_stack()
+    upgrade_stack = stack_select._get_upgrade_stack()
    if upgrade_stack is None or upgrade_stack[1] is None:
      raise Fail("Unable to determine the stack that is being upgraded to or downgraded to.")


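_get_upgrade_stack() is consumed throughout this file as a (stack_name, stack_version) pair; the guard that repeats three times above, isolated under that assumption:

from resource_management.core.exceptions import Fail
from resource_management.libraries.functions import stack_select

upgrade_stack = stack_select._get_upgrade_stack()
if upgrade_stack is None or len(upgrade_stack) < 2 or upgrade_stack[1] is None:
  raise Fail("Unable to determine the stack that is being upgraded to or downgraded to.")
stack_name, stack_version = upgrade_stack[0], upgrade_stack[1]   # e.g. ("HDP", "2.3.0.0-1234")
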
+ 9 - 9
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py

@@ -22,8 +22,8 @@ from ambari_commons.constants import AMBARI_SUDO_BINARY
from ambari_commons.str_utils import cbool, cint
from resource_management.libraries.functions import format
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.functions import get_port_from_url
@@ -51,17 +51,17 @@ agent_stack_retry_on_unavailability = cbool(config["hostLevelParams"]["agent_sta
agent_stack_retry_count = cint(config["hostLevelParams"]["agent_stack_retry_count"])

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)

hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
-hadoop_lib_home = hdp_select.get_hadoop_dir("lib")
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
+hadoop_lib_home = stack_select.get_hadoop_dir("lib")

#hadoop params
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
  # something like 2.3.0.0-1234
  stack_version = None
-  upgrade_stack = hdp_select._get_upgrade_stack()
+  upgrade_stack = stack_select._get_upgrade_stack()
  if upgrade_stack is not None and len(upgrade_stack) == 2 and upgrade_stack[1] is not None:
    stack_version = upgrade_stack[1]

@@ -143,7 +143,7 @@ oozie_site = config['configurations']['oozie-site']
# Need this for yarn.nodemanager.recovery.dir in yarn-site
yarn_log_dir_prefix = config['configurations']['yarn-env']['yarn_log_dir_prefix']

-if security_enabled and Script.is_hdp_stack_less_than("2.2"):
+if security_enabled and Script.is_stack_less_than("2.2"):
  #older versions of oozie have problems when using _HOST in principal
  oozie_site = dict(config['configurations']['oozie-site'])
  oozie_site['oozie.service.HadoopAccessorService.kerberos.principal'] = \
@@ -194,7 +194,7 @@ if https_port is not None:
hdfs_site = config['configurations']['hdfs-site']
fs_root = config['configurations']['core-site']['fs.defaultFS']

-if Script.is_hdp_stack_less_than("2.2"):
+if Script.is_stack_less_than("2.2"):
  put_shared_lib_to_hdfs_cmd = format("hadoop --config {hadoop_conf_dir} dfs -put {oozie_shared_lib} {oozie_hdfs_user_dir}")
# for newer
else:

+ 1 - 1
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/status_params.py

@@ -48,7 +48,7 @@ else:
  kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))

  conf_dir = "/etc/oozie/conf"
-  if Script.is_hdp_stack_greater_or_equal("2.2"):
+  if Script.is_stack_greater_or_equal("2.2"):
    conf_dir = format("/usr/hdp/current/{component_directory}/conf")

  tmp_dir = Script.get_tmp_dir()

+ 6 - 6
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py

@@ -22,8 +22,8 @@ Ambari Agent
from resource_management.libraries.script.script import Script
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path

@@ -34,22 +34,22 @@ tmp_dir = Script.get_tmp_dir()
stack_name = default("/hostLevelParams/stack_name", None)

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)

# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
version = default("/commandParams/version", None)

# hadoop default parameters
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
pig_conf_dir = "/etc/pig/conf"
hadoop_home = '/usr'
pig_bin_dir = ""

# hadoop parameters for 2.2+
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
  pig_conf_dir = "/usr/hdp/current/pig-client/conf"
-  hadoop_home = hdp_select.get_hadoop_dir("home")
+  hadoop_home = stack_select.get_hadoop_dir("home")
  pig_bin_dir = '/usr/hdp/current/pig-client/bin'

hdfs_user = config['configurations']['hadoop-env']['hdfs_user']

+ 3 - 3
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py

@@ -23,7 +23,7 @@ import sys
import os
from resource_management import *
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from pig import pig
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -46,10 +46,10 @@ class PigClientLinux(PigClient):
    import params
    env.set_params(params)

-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "pig", params.version)
      conf_select.select(params.stack_name, "hadoop", params.version)
-      hdp_select.select("hadoop-client", params.version) # includes pig-client
+      stack_select.select("hadoop-client", params.version) # includes pig-client

  def install(self, env):
    self.install_packages(env)

+ 1 - 1
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py

@@ -84,7 +84,7 @@ class PigServiceCheckLinux(PigServiceCheck):
      bin_dir = params.hadoop_bin_dir
    )

-    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
+    if params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.2') >= 0:
      # cleanup results from previous test
      params.HdfsResource(output_dir,
                          type="directory",

+ 5 - 5
ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py

@@ -19,7 +19,7 @@ limitations under the License.
 """
 """
 import os
 import os
 from resource_management.libraries.script import Script
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.is_empty import is_empty
 from resource_management.libraries.functions.is_empty import is_empty
@@ -43,7 +43,7 @@ version = default("/commandParams/version", None)
 host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
 
 
 upgrade_marker_file = format("{tmp_dir}/rangeradmin_ru.inprogress")
 upgrade_marker_file = format("{tmp_dir}/rangeradmin_ru.inprogress")
 
 
@@ -51,8 +51,8 @@ xml_configurations_supported = config['configurations']['ranger-env']['xml_confi
 
 
 create_db_dbuser = config['configurations']['ranger-env']['create_db_dbuser']
 create_db_dbuser = config['configurations']['ranger-env']['create_db_dbuser']
 
 
-stack_is_hdp22_or_further = Script.is_hdp_stack_greater_or_equal("2.2")
-stack_is_hdp23_or_further = Script.is_hdp_stack_greater_or_equal("2.3")
+stack_is_hdp22_or_further = Script.is_stack_greater_or_equal("2.2")
+stack_is_hdp23_or_further = Script.is_stack_greater_or_equal("2.3")
 
 
 downgrade_from_version = default("/commandParams/downgrade_from_version", None)
 downgrade_from_version = default("/commandParams/downgrade_from_version", None)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
@@ -60,7 +60,7 @@ upgrade_direction = default("/commandParams/upgrade_direction", None)
 ranger_conf    = '/etc/ranger/admin/conf'
 ranger_conf    = '/etc/ranger/admin/conf'
 ranger_ugsync_conf = '/etc/ranger/usersync/conf'
 ranger_ugsync_conf = '/etc/ranger/usersync/conf'
 
 
-if upgrade_direction == Direction.DOWNGRADE and compare_versions(format_hdp_stack_version(version),'2.3' ) < 0:
+if upgrade_direction == Direction.DOWNGRADE and compare_versions(format_stack_version(version),'2.3' ) < 0:
   stack_is_hdp22_or_further = True
   stack_is_hdp22_or_further = True
   stack_is_hdp23_or_further = False
   stack_is_hdp23_or_further = False
 
 

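The downgrade special case at the end of this hunk is easy to misread; restated as a runnable sketch (same logic and names as the hunk; the two input values are hypothetical):

from resource_management.libraries.functions.constants import Direction
from resource_management.libraries.functions.version import compare_versions, format_stack_version

upgrade_direction = Direction.DOWNGRADE   # hypothetical command parameter
version = "2.2.4.0"                       # hypothetical downgrade-target version

# When downgrading to a pre-2.3 stack, unwind the 2.3-era flags even though
# the currently installed bits are still 2.3: treat the host as 2.2-era.
if upgrade_direction == Direction.DOWNGRADE and \
        compare_versions(format_stack_version(version), '2.3') < 0:
  stack_is_hdp22_or_further = True
  stack_is_hdp23_or_further = False
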
+ 3 - 3
ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py

@@ -17,7 +17,7 @@ See the License for the specific language governing permissions and
limitations under the License.

"""
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.script import Script
from resource_management.core.resources.system import Execute
from resource_management.core.exceptions import ComponentIsNotRunning
@@ -123,7 +123,7 @@ class RangerAdmin(Script):
    import params
    env.set_params(params)

-    upgrade_stack = hdp_select._get_upgrade_stack()
+    upgrade_stack = stack_select._get_upgrade_stack()
    if upgrade_stack is None:
      raise Fail('Unable to determine the stack and stack version')

@@ -139,7 +139,7 @@ class RangerAdmin(Script):
    import params
    env.set_params(params)

-    upgrade_stack = hdp_select._get_upgrade_stack()
+    upgrade_stack = stack_select._get_upgrade_stack()
    if upgrade_stack is None:
      raise Fail('Unable to determine the stack and stack version')


+ 2 - 2
ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py

@@ -20,7 +20,7 @@ limitations under the License.
 """
 """
 from resource_management.core.resources.system import Execute
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.format import format
 
 
 def prestart(env, hdp_component):
 def prestart(env, hdp_component):
@@ -28,4 +28,4 @@ def prestart(env, hdp_component):
 
 
   if params.version and params.stack_is_hdp22_or_further:
   if params.version and params.stack_is_hdp22_or_further:
     conf_select.select(params.stack_name, hdp_component, params.version)
     conf_select.select(params.stack_name, hdp_component, params.version)
-    hdp_select.select(hdp_component, params.version)
+    stack_select.select(hdp_component, params.version)

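Note that prestart keeps its hdp_component parameter name even after the rename. A hypothetical call site, to show how the helper is meant to be used (the component name is illustrative):

# Hypothetical caller, e.g. from a component's pre_upgrade_restart:
import upgrade
from resource_management.libraries.script import Script

class RangerAdmin(Script):
  def pre_upgrade_restart(self, env, upgrade_type=None):
    # Delegates the conf_select + stack_select pair to the shared helper.
    upgrade.prestart(env, "ranger-admin")
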
+ 3 - 3
ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py

@@ -19,7 +19,7 @@ limitations under the License.
 """
 """
 import os
 import os
 from resource_management.libraries.script import Script
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.version import format_stack_version, compare_versions
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.default import default
 
 
@@ -30,9 +30,9 @@ stack_name = default("/hostLevelParams/stack_name", None)
 version = default("/commandParams/version", None)
 version = default("/commandParams/version", None)
 
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
 
 
-stack_is_hdp23_or_further = Script.is_hdp_stack_greater_or_equal("2.3")
+stack_is_hdp23_or_further = Script.is_stack_greater_or_equal("2.3")
 
 
 if stack_is_hdp23_or_further:
 if stack_is_hdp23_or_further:
   kms_home = '/usr/hdp/current/ranger-kms'
   kms_home = '/usr/hdp/current/ranger-kms'

+ 2 - 2
ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py

@@ -19,7 +19,7 @@ limitations under the License.
 """
 """
 from resource_management.core.resources.system import Execute
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.format import format
 
 
 def prestart(env, hdp_component):
 def prestart(env, hdp_component):
@@ -27,4 +27,4 @@ def prestart(env, hdp_component):
 
 
   if params.version and params.stack_is_hdp23_or_further:
   if params.version and params.stack_is_hdp23_or_further:
     conf_select.select(params.stack_name, hdp_component, params.version)
     conf_select.select(params.stack_name, hdp_component, params.version)
-    hdp_select.select(hdp_component, params.version)
+    stack_select.select(hdp_component, params.version)

+ 2 - 2
ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py

@@ -20,7 +20,7 @@ limitations under the License.
from ambari_commons.os_check import OSCheck
from resource_management.libraries.functions import format
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.script.script import Script
@@ -41,7 +41,7 @@ stack_name = default("/hostLevelParams/stack_name", None)
version = default("/commandParams/version", None)

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)

#hadoop params
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()

+ 3 - 3
ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py

@@ -18,7 +18,7 @@ limitations under the License.
 """
 """
 from resource_management.libraries.resources import HdfsResource
 from resource_management.libraries.resources import HdfsResource
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.default import default
@@ -31,7 +31,7 @@ slider_home_dir = '/usr/hdp/current/slider-client'
 
 
 #hadoop params
 #hadoop params
 slider_bin_dir = "/usr/lib/slider/bin"
 slider_bin_dir = "/usr/lib/slider/bin"
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
     slider_bin_dir = format('{slider_home_dir}/bin')
     slider_bin_dir = format('{slider_home_dir}/bin')
 
 
 slider_conf_dir = format("{slider_home_dir}/conf")
 slider_conf_dir = format("{slider_home_dir}/conf")
@@ -52,7 +52,7 @@ hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 
 
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 
 
 hdfs_site = config['configurations']['hdfs-site']
 hdfs_site = config['configurations']['hdfs-site']

+ 1 - 1
ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py

@@ -38,7 +38,7 @@ class SliderServiceCheck(Script):
    import params
    env.set_params(params)

-    if Script.is_hdp_stack_greater_or_equal("2.2"):
+    if Script.is_stack_greater_or_equal("2.2"):
      copy_to_hdfs("slider", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)

    smokeuser_kinit_cmd = format(

+ 1 - 1
ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider.py

@@ -81,7 +81,7 @@ def slider():
    File(format("{params.slider_conf_dir}/log4j.properties"),
         mode=0644
    )
-  if Script.is_hdp_stack_greater_or_equal("2.2"): 
+  if Script.is_stack_greater_or_equal("2.2"):
    File(params.slider_tar_gz,
         owner=params.hdfs_user,
         group=params.user_group,

+ 4 - 4
ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py

@@ -20,7 +20,7 @@ limitations under the License.
 
 from resource_management import *
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from slider import slider
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -38,15 +38,15 @@ class SliderClientLinux(SliderClient):
    import params
    env.set_params(params)
 
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "slider", params.version)
-      hdp_select.select("slider-client", params.version)
+      stack_select.select("slider-client", params.version)
 
      # also set all of the hadoop clients since slider client is upgraded as
      # part of the final "CLIENTS" group and we need to ensure that
      # hadoop-client is also set
      conf_select.select(params.stack_name, "hadoop", params.version)
-      hdp_select.select("hadoop-client", params.version)
+      stack_select.select("hadoop-client", params.version)
 
  def install(self, env):
    self.install_packages(env)
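
The slider_client hunk is representative of every pre_upgrade_restart touched by this commit: re-select configs and binaries for the new version, and, because clients are upgraded last, re-select hadoop-client as well. A compact sketch of that flow, assuming conf_select.select and stack_select.select keep the signatures shown in the hunks:

# Sketch of the shared pre_upgrade_restart flow (API assumed from the hunks).
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.version import compare_versions, format_stack_version

def pre_upgrade_restart(env, upgrade_type=None):
    import params
    env.set_params(params)
    # Symlink-based selection only exists on 2.2+ stacks.
    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
        conf_select.select(params.stack_name, "slider", params.version)
        stack_select.select("slider-client", params.version)
        # Clients are the final upgrade group, so drag hadoop-client along too.
        conf_select.select(params.stack_name, "hadoop", params.version)
        stack_select.select("hadoop-client", params.version)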

+ 5 - 5
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py

@@ -23,8 +23,8 @@ import os
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.core.logger import Logger
@@ -74,15 +74,15 @@ class JobHistoryServer(Script):
    import params
 
    env.set_params(params)
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
      Logger.info("Executing Spark Job History Server Stack Upgrade pre-restart")
      conf_select.select(params.stack_name, "spark", params.version)
-      hdp_select.select("spark-historyserver", params.version)
+      stack_select.select("spark-historyserver", params.version)
 
      # Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
      # need to copy the tarball, otherwise, copy it.
 
-      if params.version and compare_versions(format_hdp_stack_version(params.version), '2.3.0.0') < 0:
+      if params.version and compare_versions(format_stack_version(params.version), '2.3.0.0') < 0:
        resource_created = copy_to_hdfs(
          "tez",
          params.user_group,
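
The comment in this hunk encodes a second version gate: Spark bundled with HDP 2.3+ no longer depends on Tez, so the Tez tarball is staged in HDFS only when the target stack is older than 2.3. The gate spelled out, with names taken from this hunk and from the spark_service.py hunk further down (where copy_to_hdfs is shown returning whether work was scheduled):

# Stage the Tez tarball only for pre-2.3 target stacks.
target_version = format_stack_version(params.version)  # e.g. '2.2.0.0'
if target_version and compare_versions(target_version, '2.3.0.0') < 0:
    resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user,
                                    host_sys_prepped=params.host_sys_prepped)
    if resource_created:
        # Flush the queued HDFS operations in one batch.
        params.HdfsResource(None, action="execute")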

+ 9 - 9
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py

@@ -25,10 +25,10 @@ from setup_spark import *
 
 import resource_management.libraries.functions
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions.get_hdp_version import get_hdp_version
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.get_stack_version import get_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 
@@ -49,7 +49,7 @@ tmp_dir = Script.get_tmp_dir()
 
 stack_name = default("/hostLevelParams/stack_name", None)
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
 host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 # New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade
@@ -58,16 +58,16 @@ version = default("/commandParams/version", None)
 # TODO! FIXME! Version check is not working as of today :
 #   $ yum list installed | grep hdp-select
 #   hdp-select.noarch                            2.2.1.0-2340.el6           @HDP-2.2
-# And hdp_stack_version returned from hostLevelParams/stack_version is : 2.2.0.0
+# And stack_version_formatted returned from hostLevelParams/stack_version is : 2.2.0.0
 # Commenting out for time being
-#stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2.1.0') >= 0
+#stack_is_hdp22_or_further = stack_version_formatted != "" and compare_versions(stack_version_formatted, '2.2.1.0') >= 0
 
 spark_conf = '/etc/spark/conf'
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
 
-if Script.is_hdp_stack_greater_or_equal("2.2"):
-  hadoop_home = hdp_select.get_hadoop_dir("home")
+if Script.is_stack_greater_or_equal("2.2"):
+  hadoop_home = stack_select.get_hadoop_dir("home")
   spark_conf = format("/usr/hdp/current/{component_directory}/conf")
   spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
   spark_pid_dir = status_params.spark_pid_dir
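
Throughout the commit, hdp_stack_version becomes stack_version_formatted, but the value is unchanged: the raw hostLevelParams/stack_version string normalized into a four-part, comparable form. An illustrative stand-in for that normalization (hypothetical, not the real format_stack_version, which lives in resource_management.libraries.functions.version):

# Hypothetical stand-in showing the shape of the normalization only.
def format_stack_version_sketch(raw):
    parts = str(raw).split('.')
    while len(parts) < 4:
        parts.append('0')          # pad '2.2' out to '2.2.0.0'
    return '.'.join(parts[:4])

assert format_stack_version_sketch('2.2') == '2.2.0.0'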

+ 3 - 3
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py

@@ -27,7 +27,7 @@ from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.logger import Logger
 from resource_management.core import shell
 from resource_management.libraries.functions.version import compare_versions
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 
 def setup_spark(env, type, upgrade_type = None, action = None):
  import params
@@ -99,9 +99,9 @@ def setup_spark(env, type, upgrade_type = None, action = None):
      key_value_delimiter = " ",
    )
 
-  effective_version = params.version if upgrade_type is not None else params.hdp_stack_version
+  effective_version = params.version if upgrade_type is not None else params.stack_version_formatted
  if effective_version:
-    effective_version = format_hdp_stack_version(effective_version)
+    effective_version = format_stack_version(effective_version)
 
  if params.spark_thrift_fairscheduler_content and effective_version and compare_versions(effective_version, '2.4.0.0') >= 0:
    # create spark-thrift-fairscheduler.xml
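
The effective_version idiom above decides which stack version a feature gate should test: the version being upgraded to when an upgrade is in flight, otherwise the version already installed. Restated with the names from the hunk:

# upgrade in progress -> gate on the target version (commandParams/version)
# normal (re)start    -> gate on the installed stack (stack_version_formatted)
effective_version = (params.version if upgrade_type is not None
                     else params.stack_version_formatted)
if effective_version:
    effective_version = format_stack_version(effective_version)
# Example gate: the Spark thrift fair-scheduler config only applies on 2.4+.
if effective_version and compare_versions(effective_version, '2.4.0.0') >= 0:
    pass  # render spark-thrift-fairscheduler.xml here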

+ 4 - 4
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_client.py

@@ -21,8 +21,8 @@ limitations under the License.
 import sys
 from resource_management import *
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.logger import Logger
 from resource_management.core import shell
@@ -50,10 +50,10 @@ class SparkClient(Script):
    import params
 
    env.set_params(params)
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
      Logger.info("Executing Spark Client Stack Upgrade pre-restart")
      conf_select.select(params.stack_name, "spark", params.version)
-      hdp_select.select("spark-client", params.version)
+      stack_select.select("spark-client", params.version)
 
 if __name__ == "__main__":
   SparkClient().execute()

+ 4 - 4
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py

@@ -25,16 +25,16 @@ from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import File, Execute
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 
 def spark_service(name, upgrade_type=None, action=None):
  import params
 
  if action == 'start':
 
-    effective_version = params.version if upgrade_type is not None else params.hdp_stack_version
+    effective_version = params.version if upgrade_type is not None else params.stack_version_formatted
    if effective_version:
-      effective_version = format_hdp_stack_version(effective_version)
+      effective_version = format_stack_version(effective_version)
 
    if effective_version and compare_versions(effective_version, '2.4.0.0') >= 0:
      # copy spark-hdp-assembly.jar to hdfs
@@ -56,7 +56,7 @@ def spark_service(name, upgrade_type=None, action=None):
 
    # Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
    # need to copy the tarball, otherwise, copy it.
-    if params.hdp_stack_version and compare_versions(params.hdp_stack_version, '2.3.0.0') < 0:
+    if params.stack_version_formatted and compare_versions(params.stack_version_formatted, '2.3.0.0') < 0:
      resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
      if resource_created:
        params.HdfsResource(None, action="execute")

+ 4 - 4
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_thrift_server.py

@@ -23,8 +23,8 @@ import os
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.core.logger import Logger
@@ -70,10 +70,10 @@ class SparkThriftServer(Script):
    import params
 
    env.set_params(params)
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.3.2.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.3.2.0') >= 0:
      Logger.info("Executing Spark Thrift Server Stack Upgrade pre-restart")
      conf_select.select(params.stack_name, "spark", params.version)
-      hdp_select.select("spark-thriftserver", params.version)
+      stack_select.select("spark-thriftserver", params.version)
 
 if __name__ == "__main__":
   SparkThriftServer().execute()

+ 3 - 3
ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py

@@ -17,7 +17,7 @@ limitations under the License.
 
 """
 
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.get_kinit_path import get_kinit_path
 from resource_management.libraries.script import Script
@@ -40,7 +40,7 @@ ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 stack_name = default("/hostLevelParams/stack_name", None)
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
@@ -55,7 +55,7 @@ sqoop_bin_dir = "/usr/bin"
 zoo_conf_dir = "/etc/zookeeper"
 
 # HDP 2.2+ params
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
   sqoop_conf_dir = '/usr/hdp/current/sqoop-client/conf'
   sqoop_lib = '/usr/hdp/current/sqoop-client/lib'
   hadoop_home = '/usr/hdp/current/hbase-client'

+ 4 - 4
ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop_client.py

@@ -22,9 +22,9 @@ from resource_management.core.exceptions import ClientComponentHasNoStatus
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from sqoop import sqoop
 from ambari_commons.os_family_impl import OsFamilyImpl
 from ambari_commons import OSConst
@@ -51,9 +51,9 @@ class SqoopClientDefault(SqoopClient):
    import params
    env.set_params(params)
 
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "sqoop", params.version)
-      hdp_select.select("sqoop-client", params.version)
+      stack_select.select("sqoop-client", params.version)
 
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)

+ 4 - 4
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/drpc_server.py

@@ -22,10 +22,10 @@ import sys
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.script import Script
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from storm import storm
 from service import service
 from service_check import ServiceCheck
@@ -52,9 +52,9 @@ class DrpcServer(Script):
    import params
    env.set_params(params)
 
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "storm", params.version)
-      hdp_select.select("storm-client", params.version)
+      stack_select.select("storm-client", params.version)
 
  def start(self, env, upgrade_type=None):
    import params

+ 5 - 5
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/nimbus.py

@@ -23,9 +23,9 @@ from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.script import Script
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from storm import storm
 from service import service
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -56,10 +56,10 @@ class NimbusDefault(Nimbus):
  def pre_upgrade_restart(self, env, upgrade_type=None):
    import params
    env.set_params(params)
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "storm", params.version)
-      hdp_select.select("storm-client", params.version)
-      hdp_select.select("storm-nimbus", params.version)
+      stack_select.select("storm-client", params.version)
+      stack_select.select("storm-nimbus", params.version)
 
 
  def start(self, env, upgrade_type=None):

+ 5 - 5
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/nimbus_prod.py

@@ -23,10 +23,10 @@ from resource_management.libraries.script import Script
 from storm import storm
 from supervisord_service import supervisord_service, supervisord_check_status
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 
 class Nimbus(Script):
 
@@ -47,10 +47,10 @@ class Nimbus(Script):
    import params
    env.set_params(params)
 
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "storm", params.version)
-      hdp_select.select("storm-client", params.version)
-      hdp_select.select("storm-nimbus", params.version)
+      stack_select.select("storm-client", params.version)
+      stack_select.select("storm-nimbus", params.version)
 
  def start(self, env, upgrade_type=None):
    import params

+ 5 - 5
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py

@@ -25,12 +25,12 @@ import status_params
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.get_bare_principal import get_bare_principal
 from resource_management.libraries.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import get_kinit_path
 
@@ -47,8 +47,8 @@ storm_component_home_dir = status_params.storm_component_home_dir
 conf_dir = status_params.conf_dir
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
-stack_is_hdp22_or_further = Script.is_hdp_stack_greater_or_equal("2.2")
+stack_version_formatted = format_stack_version(stack_version_unformatted)
+stack_is_hdp22_or_further = Script.is_stack_greater_or_equal("2.2")
 
 # default hadoop params
 rest_lib_dir = "/usr/lib/storm/contrib/storm-rest"
@@ -285,7 +285,7 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] if
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None
 hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
 default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin") if has_namenode else None
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 

+ 1 - 1
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_windows.py

@@ -25,7 +25,7 @@ from resource_management.libraries.functions.default import default
 # server configurations
 config = Script.get_config()
 
-stack_is_hdp23_or_further = Script.is_hdp_stack_greater_or_equal("2.3")
+stack_is_hdp23_or_further = Script.is_stack_greater_or_equal("2.3")
 
 hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
 conf_dir = os.environ["STORM_CONF_DIR"]

+ 2 - 2
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/rest_api.py

@@ -22,10 +22,10 @@ import sys
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.script import Script
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 
 from storm import storm
 from service import service

+ 1 - 1
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/status_params.py

@@ -66,7 +66,7 @@ else:
 
   storm_component_home_dir = "/usr/lib/storm"
   conf_dir = "/etc/storm/conf"
-  if Script.is_hdp_stack_greater_or_equal("2.2"):
+  if Script.is_stack_greater_or_equal("2.2"):
     storm_component_home_dir = format("/usr/hdp/current/{component_directory}")
     conf_dir = format("/usr/hdp/current/{component_directory}/conf")
 

+ 1 - 1
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/storm.py

@@ -131,7 +131,7 @@ def storm(name=None):
     TemplateConfig(format("{conf_dir}/storm_jaas.conf"),
                    owner=params.storm_user
     )
-    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
+    if params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.2') >= 0:
       TemplateConfig(format("{conf_dir}/client_jaas.conf"),
                      owner=params.storm_user
       )

+ 5 - 5
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/supervisor.py

@@ -22,10 +22,10 @@ import sys
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.script import Script
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from storm import storm
 from service import service
 from ambari_commons import OSConst
@@ -74,10 +74,10 @@ class SupervisorDefault(Supervisor):
    import params
    env.set_params(params)
 
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "storm", params.version)
-      hdp_select.select("storm-client", params.version)
-      hdp_select.select("storm-supervisor", params.version)
+      stack_select.select("storm-client", params.version)
+      stack_select.select("storm-supervisor", params.version)
 
  def start(self, env, upgrade_type=None):
    import params

+ 5 - 5
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/supervisor_prod.py

@@ -24,10 +24,10 @@ from service import service
 from supervisord_service import supervisord_service, supervisord_check_status
 from resource_management.libraries.script import Script
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
 
 
 class Supervisor(Script):
@@ -48,10 +48,10 @@ class Supervisor(Script):
    import params
    env.set_params(params)
 
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+    if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "storm", params.version)
-      hdp_select.select("storm-client", params.version)
-      hdp_select.select("storm-supervisor", params.version)
+      stack_select.select("storm-client", params.version)
+      stack_select.select("storm-supervisor", params.version)
 
  def start(self, env, upgrade_type=None):
    import params

Not all changed files are shown because too many files changed