
AMBARI-15329: Code Cleanup: Remove hdp hardcodings in functions, variables etc. (jluniya)

Jayush Luniya 9 years ago
parent
commit
456b451180
52 changed files with 139 additions and 139 deletions
  1. +1 -1 ambari-metrics/ambari-metrics-timelineservice/src/main/python/amc_service.py
  2. +2 -2 ambari-metrics/ambari-metrics-timelineservice/src/main/python/main.py
  3. +1 -1 ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_windows.py
  4. +1 -1 ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/service_check.py
  5. +1 -1 ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_check.py
  6. +1 -1 ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_windows.py
  7. +1 -1 ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_windows.py
  8. +1 -1 ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
  9. +3 -3 ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py
  10. +2 -2 ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
  11. +5 -5 ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/setup_ranger_hdfs.py
  12. +6 -6 ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py
  13. +1 -1 ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
  14. +3 -3 ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
  15. +2 -2 ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
  16. +1 -1 ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
  17. +3 -3 ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_ranger_hive.py
  18. +1 -1 ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
  19. +2 -2 ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
  20. +2 -2 ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_windows.py
  21. +3 -3 ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/setup_ranger_knox.py
  22. +1 -1 ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_windows.py
  23. +1 -1 ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
  24. +2 -2 ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_windows.py
  25. +1 -1 ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
  26. +3 -3 ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
  27. +2 -2 ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
  28. +3 -3 ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
  29. +2 -2 ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_windows.py
  30. +1 -1 ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
  31. +1 -1 ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_windows.py
  32. +1 -1 ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py
  33. +1 -1 ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_windows.py
  34. +1 -1 ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/service_check.py
  35. +3 -3 ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/setup_ranger_storm.py
  36. +2 -2 ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_windows.py
  37. +1 -1 ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
  38. +2 -2 ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params_windows.py
  39. +1 -1 ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/service_check.py
  40. +10 -10 ambari-server/src/main/resources/host_scripts/alert_disk_space.py
  41. +1 -1 ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/after-INSTALL/scripts/hook.py
  42. +1 -1 ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/after-INSTALL/scripts/params.py
  43. +1 -1 ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/after-INSTALL/scripts/shared_initialization.py
  44. +1 -1 ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/before-ANY/scripts/params.py
  45. +6 -6 ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/status_params.py
  46. +2 -2 ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
  47. +1 -1 ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
  48. +1 -1 ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
  49. +1 -1 ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
  50. +2 -2 ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/after-INSTALL/scripts/params.py
  51. +4 -4 ambari-server/src/test/python/TestVersionSelectUtil.py
  52. +36 -36 ambari-server/src/test/python/custom_actions/TestInstallPackages.py

+ 1 - 1
ambari-metrics/ambari-metrics-timelineservice/src/main/python/amc_service.py

@@ -165,7 +165,7 @@ def init_service_debug(options):
     sys.frozen = 'windows_exe'  # Fake py2exe so we can debug
 
 
-def ensure_hdp_service_soft_dependencies():
+def ensure_hadoop_service_soft_dependencies():
   if SERVICE_STATUS_RUNNING != WinServiceController.QueryStatus(EMBEDDED_HBASE_MASTER_SERVICE):
     err = 'ERROR: Service "{0}" was not started.'.format(EMBEDDED_HBASE_MASTER_SERVICE)
     raise FatalException(1, err)

+ 2 - 2
ambari-metrics/ambari-metrics-timelineservice/src/main/python/main.py

@@ -109,8 +109,8 @@ def server_process_main(options, scmStatus=None):
 
   #Ensure the 3 Hadoop services required are started on the local machine
   if not options.no_embedded_hbase:
-    from amc_service import ensure_hdp_service_soft_dependencies
-    ensure_hdp_service_soft_dependencies()
+    from amc_service import ensure_hadoop_service_soft_dependencies
+    ensure_hadoop_service_soft_dependencies()
 
   if scmStatus is not None:
     scmStatus.reportStartPending()

+ 1 - 1
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_windows.py

@@ -35,7 +35,7 @@ falcon_home = None
 falcon_log_dir = "."
 
 if os.environ.has_key("HADOOP_HOME"):
-  hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"], ".."))
+  stack_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"], ".."))
 
 if os.environ.has_key("FALCON_CONF_DIR"):
   falcon_conf_dir = os.environ["FALCON_CONF_DIR"]

+ 1 - 1
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/service_check.py

@@ -45,7 +45,7 @@ class FalconServiceCheckWindows(FalconServiceCheck):
   def service_check(self, env):
     import params
     env.set_params(params)
-    smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
+    smoke_cmd = os.path.join(params.stack_root,"Run-SmokeTests.cmd")
     service = "FALCON"
     Execute(format("cmd /C {smoke_cmd} {service}"), user=params.falcon_user, logoutput=True, tries = 3, try_sleep = 20)
 

+ 1 - 1
ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_check.py

@@ -27,7 +27,7 @@ class FlumeServiceCheck(Script):
   def service_check(self, env):
     import params
     env.set_params(params)
-    smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
+    smoke_cmd = os.path.join(params.stack_root,"Run-SmokeTests.cmd")
     service = "FLUME"
     Execute(format("cmd /C {smoke_cmd} {service}"), logoutput=True, user=params.hdfs_user)
 

+ 1 - 1
ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_windows.py

@@ -26,7 +26,7 @@ config = Script.get_config()
 
 hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
 
-hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
+stack_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
 flume_home = os.environ['FLUME_HOME']
 flume_conf_dir = os.path.join(flume_home, 'conf')
 flume_user = hadoop_user

+ 1 - 1
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_windows.py

@@ -27,7 +27,7 @@ config = Script.get_config()
 hbase_conf_dir = os.environ["HBASE_CONF_DIR"]
 hbase_bin_dir = os.path.join(os.environ["HBASE_HOME"],'bin')
 hbase_executable = os.path.join(hbase_bin_dir,"hbase.cmd")
-hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
+stack_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
 hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
 hbase_user = hadoop_user
 

+ 1 - 1
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py

@@ -34,7 +34,7 @@ class HbaseServiceCheckWindows(HbaseServiceCheck):
   def service_check(self, env):
     import params
     env.set_params(params)
-    smoke_cmd = os.path.join(params.hdp_root, "Run-SmokeTests.cmd")
+    smoke_cmd = os.path.join(params.stack_root, "Run-SmokeTests.cmd")
     service = "HBASE"
     Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hbase_user, logoutput=True)
 

+ 3 - 3
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py

@@ -29,10 +29,10 @@ def setup_ranger_hbase(upgrade_type=None):
     else:
       from resource_management.libraries.functions.setup_ranger_plugin import setup_ranger_plugin
     
-    hdp_version = None
+    stack_version = None
 
     if upgrade_type is not None:
-      hdp_version = params.version
+      stack_version = params.version
 
     if params.retryAble:
       Logger.info("HBase: Setup ranger: command retry enables thus retrying if ranger admin is down !")
@@ -80,6 +80,6 @@ def setup_ranger_hbase(upgrade_type=None):
                         component_list=['hbase-client', 'hbase-master', 'hbase-regionserver'], audit_db_is_enabled=params.xa_audit_db_is_enabled,
                         credential_file=params.credential_file, xa_audit_db_password=params.xa_audit_db_password, 
                         ssl_truststore_password=params.ssl_truststore_password, ssl_keystore_password=params.ssl_keystore_password,
-                        hdp_version_override = hdp_version, skip_if_rangeradmin_down= not params.retryAble)
+                        stack_version_override = stack_version, skip_if_rangeradmin_down= not params.retryAble)
   else:
     Logger.info('Ranger admin not installed')

+ 2 - 2
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py

@@ -26,12 +26,12 @@ from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.decorator import retry
 
-def prestart(env, hdp_component):
+def prestart(env, stack_component):
   import params
 
   if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
     conf_select.select(params.stack_name, "hbase", params.version)
-    stack_select.select(hdp_component, params.version)
+    stack_select.select(stack_component, params.version)
 
 def post_regionserver(env):
   import params

+ 5 - 5
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/setup_ranger_hdfs.py

@@ -34,10 +34,10 @@ def setup_ranger_hdfs(upgrade_type=None):
     else:
       from resource_management.libraries.functions.setup_ranger_plugin import setup_ranger_plugin
 
-    hdp_version = None
+    stack_version = None
 
     if upgrade_type is not None:
-      hdp_version = params.version
+      stack_version = params.version
 
     if params.retryAble:
       Logger.info("HDFS: Setup ranger: command retry enables thus retrying if ranger admin is down !")
@@ -58,11 +58,11 @@ def setup_ranger_hdfs(upgrade_type=None):
                         component_list=['hadoop-client'], audit_db_is_enabled=params.xa_audit_db_is_enabled,
                         credential_file=params.credential_file, xa_audit_db_password=params.xa_audit_db_password, 
                         ssl_truststore_password=params.ssl_truststore_password, ssl_keystore_password=params.ssl_keystore_password,
-                        hdp_version_override = hdp_version, skip_if_rangeradmin_down= not params.retryAble)
+                        stack_version_override = stack_version, skip_if_rangeradmin_down= not params.retryAble)
 
-    if hdp_version and params.upgrade_direction == Direction.UPGRADE:
+    if stack_version and params.upgrade_direction == Direction.UPGRADE:
       # when upgrading to 2.3+, this env file must be removed
-      if compare_versions(hdp_version, '2.3', format=True) > 0:
+      if compare_versions(stack_version, '2.3', format=True) > 0:
         source_file = os.path.join(params.hadoop_conf_dir, 'set-hdfs-plugin-env.sh')
         target_file = source_file + ".bak"
         Execute(("mv", source_file, target_file), sudo=True, only_if=format("test -f {source_file}"))

+ 6 - 6
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py

@@ -36,12 +36,12 @@ if OSCheck.is_windows_family():
 else:
   hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
   hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-  hdp_pid_dir = format("{hadoop_pid_dir_prefix}/{hdfs_user}")
-  datanode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-datanode.pid")
-  namenode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-namenode.pid")
-  snamenode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-secondarynamenode.pid")
-  journalnode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-journalnode.pid")
-  zkfc_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-zkfc.pid")
+  hadoop_pid_dir = format("{hadoop_pid_dir_prefix}/{hdfs_user}")
+  datanode_pid_file = format("{hadoop_pid_dir}/hadoop-{hdfs_user}-datanode.pid")
+  namenode_pid_file = format("{hadoop_pid_dir}/hadoop-{hdfs_user}-namenode.pid")
+  snamenode_pid_file = format("{hadoop_pid_dir}/hadoop-{hdfs_user}-secondarynamenode.pid")
+  journalnode_pid_file = format("{hadoop_pid_dir}/hadoop-{hdfs_user}-journalnode.pid")
+  zkfc_pid_file = format("{hadoop_pid_dir}/hadoop-{hdfs_user}-zkfc.pid")
   nfsgateway_pid_file = format("{hadoop_pid_dir_prefix}/root/hadoop_privileged_nfs3.pid")
 
   # Security related/required params

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py

@@ -27,7 +27,7 @@ from ambari_commons import OSConst
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
 def hcat_service_check():
   import params
-  smoke_cmd = os.path.join(params.hdp_root, "Run-SmokeTests.cmd")
+  smoke_cmd = os.path.join(params.stack_root, "Run-SmokeTests.cmd")
   service = "HCatalog"
   Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hcat_user, logoutput=True)
 

+ 3 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py

@@ -121,7 +121,7 @@ def _get_current_hiveserver_version():
     if formatted_source_version and compare_versions(formatted_source_version, "2.2") >= 0:
       version_hive_bin = format('/usr/hdp/{source_version}/hive/bin')
     command = format('{version_hive_bin}/hive --version')
-    return_code, hdp_output = shell.call(command, user=params.hive_user, path=hive_execute_path)
+    return_code, output = shell.call(command, user=params.hive_user, path=hive_execute_path)
   except Exception, e:
     Logger.error(str(e))
     raise Fail('Unable to execute hive --version command to retrieve the hiveserver2 version.')
@@ -129,12 +129,12 @@ def _get_current_hiveserver_version():
   if return_code != 0:
     raise Fail('Unable to determine the current HiveServer2 version because of a non-zero return code of {0}'.format(str(return_code)))
 
-  match = re.search('^(Hive) ([0-9]+.[0-9]+.\S+)', hdp_output, re.MULTILINE)
+  match = re.search('^(Hive) ([0-9]+.[0-9]+.\S+)', output, re.MULTILINE)
 
   if match:
     current_hive_server_version = match.group(2)
     return current_hive_server_version
   else:
-    raise Fail('The extracted hiveserver2 version "{0}" does not matching any known pattern'.format(hdp_output))
+    raise Fail('The extracted hiveserver2 version "{0}" does not matching any known pattern'.format(output))
 
 

+ 2 - 2
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py

@@ -28,7 +28,7 @@ config = Script.get_config()
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 stack_version_formatted = format_stack_version(stack_version_unformatted)
 
-hdp_root = None
+stack_root = None
 hive_conf_dir = None
 hive_home = None
 hive_lib_dir = None
@@ -39,7 +39,7 @@ hcat_config_dir = None
 hive_bin = None
 
 try:
-  hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
+  stack_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
   hive_conf_dir = os.environ["HIVE_CONF_DIR"]
   hive_home = os.environ["HIVE_HOME"]
   hive_lib_dir = os.environ["HIVE_LIB_DIR"]

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py

@@ -37,7 +37,7 @@ class HiveServiceCheckWindows(HiveServiceCheck):
   def service_check(self, env):
     import params
     env.set_params(params)
-    smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
+    smoke_cmd = os.path.join(params.stack_root,"Run-SmokeTests.cmd")
     service = "HIVE"
     Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hive_user, logoutput=True)
 

+ 3 - 3
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_ranger_hive.py

@@ -29,10 +29,10 @@ def setup_ranger_hive(upgrade_type = None):
     else:
       from resource_management.libraries.functions.setup_ranger_plugin import setup_ranger_plugin
     
-    hdp_version = None
+    stack_version = None
 
     if upgrade_type is not None:
-      hdp_version = params.version
+      stack_version = params.version
 
     if params.retryAble:
       Logger.info("Hive: Setup ranger: command retry enables thus retrying if ranger admin is down !")
@@ -72,6 +72,6 @@ def setup_ranger_hive(upgrade_type = None):
                         component_list=['hive-client', 'hive-metastore', 'hive-server2'], audit_db_is_enabled=params.xa_audit_db_is_enabled,
                         credential_file=params.credential_file, xa_audit_db_password=params.xa_audit_db_password, 
                         ssl_truststore_password=params.ssl_truststore_password, ssl_keystore_password=params.ssl_keystore_password,
-                        hdp_version_override = hdp_version, skip_if_rangeradmin_down= not params.retryAble)
+                        stack_version_override = stack_version, skip_if_rangeradmin_down= not params.retryAble)
   else:
     Logger.info('Ranger admin not installed')

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py

@@ -32,7 +32,7 @@ def webhcat_service_check():
   # AMBARI-11633 [WinTP2] Webhcat service check fails
   # Hive doesn't pass the environment variables correctly to child processes, which fails the smoke test.
   # Reducing the amount of URLs checked to the minimum required.
-  #smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
+  #smoke_cmd = os.path.join(params.stack_root,"Run-SmokeTests.cmd")
   #service = "WEBHCAT"
   #Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hcat_user, logoutput=True)
 

+ 2 - 2
ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py

@@ -248,8 +248,8 @@ if has_ranger_admin and is_supported_kafka_ranger:
   ssl_truststore_password = unicode(config['configurations']['ranger-kafka-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password']) if xml_configurations_supported else None
   credential_file = format('/etc/ranger/{repo_name}/cred.jceks') if xml_configurations_supported else None
 
-  hdp_version = get_stack_version('kafka-broker')
-  setup_ranger_env_sh_source = format('/usr/hdp/{hdp_version}/ranger-kafka-plugin/install/conf.templates/enable/kafka-ranger-env.sh')
+  stack_version = get_stack_version('kafka-broker')
+  setup_ranger_env_sh_source = format('/usr/hdp/{stack_version}/ranger-kafka-plugin/install/conf.templates/enable/kafka-ranger-env.sh')
   setup_ranger_env_sh_target = format("{conf_dir}/kafka-ranger-env.sh")
 
   #For SQLA explicitly disable audit to DB for Ranger

+ 2 - 2
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_windows.py

@@ -25,7 +25,7 @@ from status_params import *
 # server configurations
 config = Script.get_config()
 
-hdp_root = None
+stack_root = None
 knox_home = None
 knox_conf_dir = None
 knox_logs_dir = None
@@ -38,7 +38,7 @@ knox_master_secret_path = None
 knox_cert_store_path = None
 
 try:
-  hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
+  stack_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
   knox_home = os.environ['KNOX_HOME']
   knox_conf_dir = os.environ['KNOX_CONF_DIR']
   knox_logs_dir = os.environ['KNOX_LOG_DIR']

+ 3 - 3
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/setup_ranger_knox.py

@@ -29,9 +29,9 @@ def setup_ranger_knox(upgrade_type=None):
     else:
       from resource_management.libraries.functions.setup_ranger_plugin import setup_ranger_plugin
     
-    hdp_version = None
+    stack_version = None
     if upgrade_type is not None:
-      hdp_version = params.version
+      stack_version = params.version
 
     if params.retryAble:
       Logger.info("Knox: Setup ranger: command retry enables thus retrying if ranger admin is down !")
@@ -72,6 +72,6 @@ def setup_ranger_knox(upgrade_type=None):
                         component_list=['knox-server'], audit_db_is_enabled=params.xa_audit_db_is_enabled,
                         credential_file=params.credential_file, xa_audit_db_password=params.xa_audit_db_password, 
                         ssl_truststore_password=params.ssl_truststore_password, ssl_keystore_password=params.ssl_keystore_password,
-                        hdp_version_override = hdp_version, skip_if_rangeradmin_down= not params.retryAble)
+                        stack_version_override = stack_version, skip_if_rangeradmin_down= not params.retryAble)
   else:
     Logger.info('Ranger admin not installed')

+ 1 - 1
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_windows.py

@@ -24,7 +24,7 @@ from status_params import *
 config = Script.get_config()
 
 hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
-hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"], ".."))
+stack_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"], ".."))
 oozie_root = os.environ['OOZIE_ROOT']
 oozie_home = os.environ['OOZIE_HOME']
 oozie_conf_dir = os.path.join(oozie_home,'conf')

+ 1 - 1
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py

@@ -130,7 +130,7 @@ class OozieServiceCheckWindows(OozieServiceCheck):
     import params
 
     env.set_params(params)
-    smoke_cmd = os.path.join(params.hdp_root, "Run-SmokeTests.cmd")
+    smoke_cmd = os.path.join(params.stack_root, "Run-SmokeTests.cmd")
     service = "OOZIE"
     Execute(format("cmd /C {smoke_cmd} {service}"), logoutput=True)
 

+ 2 - 2
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_windows.py

@@ -23,11 +23,11 @@ from resource_management import *
 # server configurations
 config = Script.get_config()
 
-hdp_root = None
+stack_root = None
 pig_home = None
 pig_conf_dir = None
 try:
-  hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
+  stack_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
   pig_home = os.environ['PIG_HOME']
   pig_conf_dir = os.path.join(pig_home,'conf')
 except:

+ 1 - 1
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py

@@ -125,7 +125,7 @@ class PigServiceCheckWindows(PigServiceCheck):
   def service_check(self, env):
     import params
     env.set_params(params)
-    smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
+    smoke_cmd = os.path.join(params.stack_root,"Run-SmokeTests.cmd")
     service = "PIG"
     Execute(format("cmd /C {smoke_cmd} {service}", smoke_cmd=smoke_cmd, service=service), logoutput=True, user=params.pig_user, timeout=300)
 

+ 3 - 3
ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py

@@ -23,9 +23,9 @@ from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.format import format
 
-def prestart(env, hdp_component):
+def prestart(env, stack_component):
   import params
 
   if params.version and params.stack_is_hdp22_or_further:
-    conf_select.select(params.stack_name, hdp_component, params.version)
-    stack_select.select(hdp_component, params.version)
+    conf_select.select(params.stack_name, stack_component, params.version)
+    stack_select.select(stack_component, params.version)

+ 2 - 2
ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py

@@ -17,7 +17,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.script import Script
 from resource_management.core.resources.system import Execute
 from resource_management.core.exceptions import ComponentIsNotRunning
@@ -82,7 +82,7 @@ class KmsServer(Script):
     import params
     env.set_params(params)
 
-    upgrade_stack = hdp_select._get_upgrade_stack()
+    upgrade_stack = stack_select._get_upgrade_stack()
     if upgrade_stack is None:
       raise Fail('Unable to determine the stack and stack version')
 

+ 3 - 3
ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py

@@ -22,9 +22,9 @@ from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.format import format
 
-def prestart(env, hdp_component):
+def prestart(env, stack_component):
   import params
 
   if params.version and params.stack_is_hdp23_or_further:
-    conf_select.select(params.stack_name, hdp_component, params.version)
-    stack_select.select(hdp_component, params.version)
+    conf_select.select(params.stack_name, stack_component, params.version)
+    stack_select.select(stack_component, params.version)

+ 2 - 2
ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_windows.py

@@ -24,13 +24,13 @@ import os
 # server configurations
 config = Script.get_config()
 
-hdp_root = None
+stack_root = None
 slider_home = None
 slider_bin_dir = None
 slider_conf_dir = None
 storm_slider_conf_dir = None
 try:
-  hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
+  stack_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
   slider_home = os.environ['SLIDER_HOME']
   slider_bin_dir = os.path.join(slider_home, 'bin')
   slider_conf_dir = os.path.join(slider_home, 'conf')

+ 1 - 1
ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py

@@ -29,7 +29,7 @@ class SliderServiceCheck(Script):
   def service_check(self, env):
     import params
     env.set_params(params)
-    smoke_cmd = os.path.join(params.hdp_root, "Run-SmokeTests.cmd")
+    smoke_cmd = os.path.join(params.stack_root, "Run-SmokeTests.cmd")
     service = "SLIDER"
     Execute(format("cmd /C {smoke_cmd} {service}"), logoutput=True, user=params.hdfs_user)
 

+ 1 - 1
ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_windows.py

@@ -24,7 +24,7 @@ config = Script.get_config()
 
 sqoop_user = "sqoop"
 
-hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"], ".."))
+stack_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"], ".."))
 sqoop_env_cmd_template = config['configurations']['sqoop-env']['content']
 sqoop_home_dir = os.environ["SQOOP_HOME"]
 sqoop_conf_dir = os.path.join(sqoop_home_dir, "conf")

+ 1 - 1
ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/service_check.py

@@ -53,7 +53,7 @@ class SqoopServiceCheckWindows(SqoopServiceCheck):
   def service_check(self, env):
     import params
     env.set_params(params)
-    smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
+    smoke_cmd = os.path.join(params.stack_root,"Run-SmokeTests.cmd")
     service = "SQOOP"
     Execute(format("cmd /C {smoke_cmd} {service}"), logoutput=True)
 

+ 1 - 1
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_windows.py

@@ -27,7 +27,7 @@ config = Script.get_config()
 
 stack_is_hdp23_or_further = Script.is_stack_greater_or_equal("2.3")
 
-hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
+stack_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
 conf_dir = os.environ["STORM_CONF_DIR"]
 hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
 storm_user = hadoop_user

+ 1 - 1
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/service_check.py

@@ -38,7 +38,7 @@ class ServiceCheckWindows(ServiceCheck):
   def service_check(self, env):
     import params
     env.set_params(params)
-    smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
+    smoke_cmd = os.path.join(params.stack_root,"Run-SmokeTests.cmd")
     service = "STORM"
     Execute(format("cmd /C {smoke_cmd} {service}", smoke_cmd=smoke_cmd, service=service), user=params.storm_user, logoutput=True)
 

+ 3 - 3
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/setup_ranger_storm.py

@@ -32,9 +32,9 @@ def setup_ranger_storm(upgrade_type=None):
     else:
       from resource_management.libraries.functions.setup_ranger_plugin import setup_ranger_plugin
     
-    hdp_version = None
+    stack_version = None
     if upgrade_type is not None:
-      hdp_version = params.version
+      stack_version = params.version
 
     if params.retryAble:
       Logger.info("Storm: Setup ranger: command retry enables thus retrying if ranger admin is down !")
@@ -75,6 +75,6 @@ def setup_ranger_storm(upgrade_type=None):
                         component_list=['storm-client', 'storm-nimbus'], audit_db_is_enabled=params.xa_audit_db_is_enabled,
                         credential_file=params.credential_file, xa_audit_db_password=params.xa_audit_db_password, 
                         ssl_truststore_password=params.ssl_truststore_password, ssl_keystore_password=params.ssl_keystore_password,
-                        hdp_version_override = hdp_version, skip_if_rangeradmin_down= not params.retryAble)
+                        stack_version_override = stack_version, skip_if_rangeradmin_down= not params.retryAble)
   else:
     Logger.info('Ranger admin not installed')

+ 2 - 2
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_windows.py

@@ -36,9 +36,9 @@ except KeyError:
 
 stack_version_formatted = ""
 
-hdp_root = None
+stack_root = None
 try:
-  hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"], ".."))
+  stack_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"], ".."))
 except:
   pass
 

+ 1 - 1
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py

@@ -100,7 +100,7 @@ class TezServiceCheckWindows(TezServiceCheck):
   def service_check(self, env):
     import params
     env.set_params(params)
-    smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
+    smoke_cmd = os.path.join(params.stack_root,"Run-SmokeTests.cmd")
     service = "TEZ"
     Execute(format("cmd /C {smoke_cmd} {service}"), logoutput=True, user=params.tez_user)
 

+ 2 - 2
ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params_windows.py

@@ -26,11 +26,11 @@ import status_params
 config = Script.get_config()
 
 config_dir = None
-hdp_root = None
+stack_root = None
 try:
   # not used zookeeper_home_dir = os.environ["ZOOKEEPER_HOME"]
   config_dir = os.environ["ZOOKEEPER_CONF_DIR"]
-  hdp_root = os.environ["HADOOP_NODE_INSTALL_ROOT"]
+  stack_root = os.environ["HADOOP_NODE_INSTALL_ROOT"]
 except:
   pass
 

+ 1 - 1
ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/service_check.py

@@ -65,7 +65,7 @@ class ZookeeperServiceCheckWindows(ZookeeperServiceCheck):
     import params
     env.set_params(params)
 
-    smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
+    smoke_cmd = os.path.join(params.stack_root,"Run-SmokeTests.cmd")
     service = "Zookeeper"
     Execute(format("cmd /C {smoke_cmd} {service}"), user=params.zk_user, logoutput=True, tries=3, try_sleep=20)
 

+ 10 - 10
ambari-server/src/main/resources/host_scripts/alert_disk_space.py

@@ -37,10 +37,10 @@ PERCENT_USED_WARNING_DEFAULT = 50
 PERCENT_USED_CRITICAL_DEFAULT = 80
 
 # the location where HDP installs components when using HDP 2.2+
-HDP_HOME_DIR = "/usr/hdp"
+STACK_HOME_DIR = "/usr/hdp"
 
 # the location where HDP installs components when using HDP 2.0 to 2.1
-HDP_HOME_LEGACY_DIR = "/usr/lib"
+STACK_HOME_LEGACY_DIR = "/usr/lib"
 
 def get_tokens():
   """
@@ -66,14 +66,14 @@ def execute(configurations={}, parameters={}, host_name=None):
   """
 
   # determine the location of HDP home
-  hdp_home = None
-  if os.path.isdir(HDP_HOME_DIR):
-    hdp_home = HDP_HOME_DIR
-  elif os.path.isdir(HDP_HOME_LEGACY_DIR):
-    hdp_home = HDP_HOME_LEGACY_DIR
-
-  # if hdp home was found, use it; otherwise default to None
-  path = hdp_home if hdp_home is not None else None
+  stack_home = None
+  if os.path.isdir(STACK_HOME_DIR):
+    stack_home = STACK_HOME_DIR
+  elif os.path.isdir(STACK_HOME_LEGACY_DIR):
+    stack_home = STACK_HOME_LEGACY_DIR
+
+  # if stack home was found, use it; otherwise default to None
+  path = stack_home if stack_home is not None else None
 
   try:
     disk_usage = _get_disk_usage(path)

+ 1 - 1
ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/after-INSTALL/scripts/hook.py

@@ -28,7 +28,7 @@ class AfterInstallHook(Hook):
     import params
 
     env.set_params(params)
-    setup_hdp_install_directory()
+    setup_install_directory()
     setup_config()
 
 if __name__ == "__main__":

+ 1 - 1
ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/after-INSTALL/scripts/params.py

@@ -35,7 +35,7 @@ else:
 
 hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
-versioned_hdp_root = '/usr/bigtop/current'
+versioned_stack_root = '/usr/bigtop/current'
 #security params
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 #java params

+ 1 - 1
ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/after-INSTALL/scripts/shared_initialization.py

@@ -19,7 +19,7 @@ limitations under the License.
 import os
 from resource_management import *
 
-def setup_hdp_install_directory():
+def setup_install_directory():
   import params
   if params.rpm_version:
     Execute(format('ambari-python-wrap /usr/bin/bigtop-select set all `ambari-python-wrap /usr/bin/bigtop-select versions | grep ^{rpm_version}- | tail -1`'),

+ 1 - 1
ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/before-ANY/scripts/params.py

@@ -47,7 +47,7 @@ else:
 
 hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
-versioned_hdp_root = '/usr/bigtop/current'
+versioned_stack_root = '/usr/bigtop/current'
 #security params
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 

+ 6 - 6
ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/status_params.py

@@ -23,9 +23,9 @@ config = Script.get_config()
 
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-hdp_pid_dir = format("{hadoop_pid_dir_prefix}/{hdfs_user}")
-datanode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-datanode.pid")
-namenode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-namenode.pid")
-snamenode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-secondarynamenode.pid")
-journalnode_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-journalnode.pid")
-zkfc_pid_file = format("{hdp_pid_dir}/hadoop-{hdfs_user}-zkfc.pid")
+hadoop_pid_dir = format("{hadoop_pid_dir_prefix}/{hdfs_user}")
+datanode_pid_file = format("{hadoop_pid_dir}/hadoop-{hdfs_user}-datanode.pid")
+namenode_pid_file = format("{hadoop_pid_dir}/hadoop-{hdfs_user}-namenode.pid")
+snamenode_pid_file = format("{hadoop_pid_dir}/hadoop-{hdfs_user}-secondarynamenode.pid")
+journalnode_pid_file = format("{hadoop_pid_dir}/hadoop-{hdfs_user}-journalnode.pid")
+zkfc_pid_file = format("{hadoop_pid_dir}/hadoop-{hdfs_user}-zkfc.pid")

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py

@@ -20,7 +20,7 @@ limitations under the License.
 from resource_management.libraries.script.hook import Hook
 from shared_initialization import link_configs
 from shared_initialization import setup_config
-from shared_initialization import setup_hdp_symlinks
+from shared_initialization import setup_stack_symlinks
 
 class AfterInstallHook(Hook):
 
@@ -28,7 +28,7 @@ class AfterInstallHook(Hook):
     import params
 
     env.set_params(params)
-    setup_hdp_symlinks()
+    setup_stack_symlinks()
     setup_config()
 
     link_configs(self.stroutfile)

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py

@@ -49,7 +49,7 @@ if Script.is_stack_greater_or_equal("2.2"):
   # not supported in HDP 2.2+
   hadoop_conf_empty_dir = None
 
-versioned_hdp_root = '/usr/hdp/current'
+versioned_stack_root = '/usr/hdp/current'
 
 #security params
 security_enabled = config['configurations']['cluster-env']['security_enabled']

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py

@@ -28,7 +28,7 @@ from resource_management.libraries.resources.xml_config import XmlConfig
 from resource_management.libraries.script import Script
 
 
-def setup_hdp_symlinks():
+def setup_stack_symlinks():
   """
   Invokes hdp-select set all against a calculated fully-qualified, "normalized" version based on a
   stack version, such as "2.3". This should always be called after a component has been

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py

@@ -103,7 +103,7 @@ hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec", force_latest_on_upgr
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 hadoop_secure_dn_user = hdfs_user
 hadoop_dir = "/etc/hadoop"
-versioned_hdp_root = '/usr/hdp/current'
+versioned_stack_root = '/usr/hdp/current'
 hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
 datanode_max_locked_memory = config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory']
 is_datanode_max_locked_memory_set = not is_empty(config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory'])

+ 2 - 2
ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/after-INSTALL/scripts/params.py

@@ -39,8 +39,8 @@ if os.environ.has_key("HADOOP_NODE_INSTALL_ROOT"):
   hadoop_install_root = os.environ["HADOOP_NODE_INSTALL_ROOT"]
 
 
-hdp_log_dir = "c:\\hadoop\\logs"
-hdp_data_dir = "c:\\hadoop"
+stack_log_dir = "c:\\hadoop\\logs"
+stack_data_dir = "c:\\hadoop"
 db_flavor = "MSSQL"
 db_hostname = "localhost"
 db_port = "1433"

+ 4 - 4
ambari-server/src/test/python/TestVersionSelectUtil.py

@@ -39,7 +39,7 @@ class TestVersionSelectUtil(TestCase):
   @patch('__builtin__.open')
   @patch("resource_management.core.shell.call")
   def test_get_component_version(self, call_mock, open_mock):
-    hdp_expected_version = "2.2.1.0-2175"
+    stack_expected_version = "2.2.1.0-2175"
 
     # Mock classes for reading from a file
     class MagicFile(object):
@@ -49,7 +49,7 @@ class TestVersionSelectUtil(TestCase):
                            "zookeeper-client"
                            ])
       def read(self, value):
-        return (value + " - " + hdp_expected_version) if value in self.allowed_names else ("ERROR: Invalid package - " + value)
+        return (value + " - " + stack_expected_version) if value in self.allowed_names else ("ERROR: Invalid package - " + value)
 
       def __exit__(self, exc_type, exc_val, exc_tb):
         pass
@@ -88,6 +88,6 @@ class TestVersionSelectUtil(TestCase):
 
     # Pass
     version = self.module.get_component_version("HDP", "hadoop-hdfs-namenode")
-    self.assertEquals(version, hdp_expected_version)
+    self.assertEquals(version, stack_expected_version)
     version = self.module.get_component_version("HDP", "hadoop-hdfs-datanode")
-    self.assertEquals(version, hdp_expected_version)
+    self.assertEquals(version, stack_expected_version)

+ 36 - 36
ambari-server/src/test/python/custom_actions/TestInstallPackages.py

@@ -70,9 +70,9 @@ class TestInstallPackages(RMFTestCase):
   def test_normal_flow_rhel(self,
                             write_actual_version_to_history_file_mock,
                             read_actual_version_from_history_file_mock,
-                            hdp_versions_mock,
+                            stack_versions_mock,
                             put_structured_out_mock, allInstalledPackages_mock, list_ambari_managed_repos_mock):
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [],  # before installation attempt
       [VERSION_STUB]
     ]
@@ -129,10 +129,10 @@ class TestInstallPackages(RMFTestCase):
   @patch("resource_management.libraries.functions.repo_version_history.write_actual_version_to_history_file")
   def test_normal_flow_sles(self, write_actual_version_to_history_file_mock,
                             read_actual_version_from_history_file_mock,
-                            hdp_versions_mock, put_structured_out_mock, allInstalledPackages_mock, list_ambari_managed_repos_mock, is_suse_family_mock):
+                            stack_versions_mock, put_structured_out_mock, allInstalledPackages_mock, list_ambari_managed_repos_mock, is_suse_family_mock):
     is_suse_family_mock = True
     Script.stack_version_from_distro_select = VERSION_STUB
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [],  # before installation attempt
       [VERSION_STUB]
     ]
@@ -190,10 +190,10 @@ class TestInstallPackages(RMFTestCase):
   @patch("resource_management.libraries.functions.repo_version_history.write_actual_version_to_history_file")
   def test_exclude_existing_repo(self,  write_actual_version_to_history_file_mock,
                                  read_actual_version_from_history_file_mock,
-                                 hdp_versions_mock,
+                                 stack_versions_mock,
                                  allInstalledPackages_mock, put_structured_out_mock,
                                  is_redhat_family_mock, list_ambari_managed_repos_mock):
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [],  # before installation attempt
       [VERSION_STUB]
     ]
@@ -324,11 +324,11 @@ class TestInstallPackages(RMFTestCase):
   @patch("resource_management.libraries.functions.repo_version_history.write_actual_version_to_history_file")
   def test_format_package_name(self,                                                                                    write_actual_version_to_history_file_mock,
                                read_actual_version_from_history_file_mock,
-                               hdp_versions_mock,
+                               stack_versions_mock,
                                allInstalledPackages_mock, put_structured_out_mock,
                                package_mock, is_suse_family_mock):
     Script.stack_version_from_distro_select = VERSION_STUB
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [],  # before installation attempt
       [VERSION_STUB]
     ]
@@ -387,9 +387,9 @@ class TestInstallPackages(RMFTestCase):
   def test_version_reporting__build_number_defined(self,
                                                                                    write_actual_version_to_history_file_mock,
                                                                                    read_actual_version_from_history_file_mock,
-                                                                                   hdp_versions_mock,
+                                                                                   stack_versions_mock,
                                                                                    put_structured_out_mock, allInstalledPackages_mock, list_ambari_managed_repos_mock):
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [OLD_VERSION_STUB],  # before installation attempt
       [OLD_VERSION_STUB, VERSION_STUB]
     ]
@@ -419,12 +419,12 @@ class TestInstallPackages(RMFTestCase):
     self.assertTrue(write_actual_version_to_history_file_mock.called)
     self.assertEquals(write_actual_version_to_history_file_mock.call_args[0], (VERSION_STUB_WITHOUT_BUILD_NUMBER, VERSION_STUB))
 
-    hdp_versions_mock.reset_mock()
+    stack_versions_mock.reset_mock()
     write_actual_version_to_history_file_mock.reset_mock()
     put_structured_out_mock.reset_mock()
 
     # Test retrying install again
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [OLD_VERSION_STUB, VERSION_STUB],
       [OLD_VERSION_STUB, VERSION_STUB]
     ]
@@ -464,14 +464,14 @@ class TestInstallPackages(RMFTestCase):
   @patch("resource_management.libraries.functions.repo_version_history.read_actual_version_from_history_file")
   @patch("resource_management.libraries.functions.repo_version_history.write_actual_version_to_history_file")
   @patch("os.path.exists")
-  def test_version_reporting__build_number_not_defined__usr_hdp_present__no_components_installed(self,
+  def test_version_reporting__build_number_not_defined_stack_root_present__no_components_installed(self,
                                                                             exists_mock,
                                                                             write_actual_version_to_history_file_mock,
                                                                             read_actual_version_from_history_file_mock,
-                                                                            hdp_versions_mock,
+                                                                            stack_versions_mock,
                                                                             put_structured_out_mock, allInstalledPackages_mock, list_ambari_managed_repos_mock):
     exists_mock.return_value = True
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [],  # before installation attempt
       []
     ]
@@ -508,7 +508,7 @@ class TestInstallPackages(RMFTestCase):
 
     self.assertFalse(write_actual_version_to_history_file_mock.called)
 
-    hdp_versions_mock.reset_mock()
+    stack_versions_mock.reset_mock()
     write_actual_version_to_history_file_mock.reset_mock()
     put_structured_out_mock.reset_mock()
 
@@ -520,14 +520,14 @@ class TestInstallPackages(RMFTestCase):
   @patch("resource_management.libraries.functions.repo_version_history.read_actual_version_from_history_file")
   @patch("resource_management.libraries.functions.repo_version_history.write_actual_version_to_history_file")
   @patch("os.path.exists")
-  def test_version_reporting__build_number_not_defined__usr_hdp_absent(self,
+  def test_version_reporting__build_number_not_defined_stack_root_absent(self,
                                                                         exists_mock,
                                                                         write_actual_version_to_history_file_mock,
                                                                         read_actual_version_from_history_file_mock,
-                                                                        hdp_versions_mock,
+                                                                        stack_versions_mock,
                                                                         put_structured_out_mock, allInstalledPackages_mock, list_ambari_managed_repos_mock):
     exists_mock.return_value = False
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [],  # before installation attempt
       []
     ]
@@ -562,13 +562,13 @@ class TestInstallPackages(RMFTestCase):
 
     self.assertFalse(write_actual_version_to_history_file_mock.called)
 
-    hdp_versions_mock.reset_mock()
+    stack_versions_mock.reset_mock()
     write_actual_version_to_history_file_mock.reset_mock()
     put_structured_out_mock.reset_mock()
 
     # Test retrying install again  (correct build number, provided by other nodes, is now received from server)
 
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [],  # before installation attempt
       []
     ]
@@ -611,12 +611,12 @@ class TestInstallPackages(RMFTestCase):
   @patch("resource_management.libraries.functions.stack_select.get_stack_versions")
   @patch("resource_management.libraries.functions.repo_version_history.read_actual_version_from_history_file")
   @patch("resource_management.libraries.functions.repo_version_history.write_actual_version_to_history_file")
-  def test_version_reporting__build_number_not_defined__usr_hdp_present(self,
+  def test_version_reporting__build_number_not_defined_stack_root_present(self,
                                                                     write_actual_version_to_history_file_mock,
                                                                     read_actual_version_from_history_file_mock,
-                                                                    hdp_versions_mock,
+                                                                    stack_versions_mock,
                                                                     put_structured_out_mock, allInstalledPackages_mock, list_ambari_managed_repos_mock):
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [OLD_VERSION_STUB],  # before installation attempt
       [OLD_VERSION_STUB, VERSION_STUB]
     ]
@@ -646,12 +646,12 @@ class TestInstallPackages(RMFTestCase):
     self.assertTrue(write_actual_version_to_history_file_mock.called)
     self.assertEquals(write_actual_version_to_history_file_mock.call_args[0], (VERSION_STUB_WITHOUT_BUILD_NUMBER, VERSION_STUB))
 
-    hdp_versions_mock.reset_mock()
+    stack_versions_mock.reset_mock()
     write_actual_version_to_history_file_mock.reset_mock()
     put_structured_out_mock.reset_mock()
 
     # Test retrying install again
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [OLD_VERSION_STUB, VERSION_STUB],
       [OLD_VERSION_STUB, VERSION_STUB]
     ]
@@ -689,12 +689,12 @@ class TestInstallPackages(RMFTestCase):
   @patch("resource_management.libraries.functions.stack_select.get_stack_versions")
   @patch("resource_management.libraries.functions.repo_version_history.read_actual_version_from_history_file")
   @patch("resource_management.libraries.functions.repo_version_history.write_actual_version_to_history_file")
-  def test_version_reporting__wrong_build_number_specified__usr_hdp_present(self,
+  def test_version_reporting__wrong_build_number_specified_stack_root_present(self,
                                                                         write_actual_version_to_history_file_mock,
                                                                         read_actual_version_from_history_file_mock,
-                                                                        hdp_versions_mock,
+                                                                        stack_versions_mock,
                                                                         put_structured_out_mock, allInstalledPackages_mock, list_ambari_managed_repos_mock):
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [OLD_VERSION_STUB],  # before installation attempt
       [OLD_VERSION_STUB, VERSION_STUB]
     ]
@@ -724,12 +724,12 @@ class TestInstallPackages(RMFTestCase):
     self.assertTrue(write_actual_version_to_history_file_mock.called)
     self.assertEquals(write_actual_version_to_history_file_mock.call_args[0], ('2.2.0.1', VERSION_STUB))
 
-    hdp_versions_mock.reset_mock()
+    stack_versions_mock.reset_mock()
     write_actual_version_to_history_file_mock.reset_mock()
     put_structured_out_mock.reset_mock()
 
     # Test retrying install again
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [OLD_VERSION_STUB, VERSION_STUB],
       [OLD_VERSION_STUB, VERSION_STUB]
     ]
@@ -768,14 +768,14 @@ class TestInstallPackages(RMFTestCase):
   @patch("resource_management.libraries.functions.repo_version_history.read_actual_version_from_history_file")
   @patch("resource_management.libraries.functions.repo_version_history.write_actual_version_to_history_file")
   @patch("os.path.exists")
-  def test_version_reporting__wrong_build_number_specified__usr_hdp_absent(self,
+  def test_version_reporting__wrong_build_number_specified_stack_root_absent(self,
                                                                             exists_mock,
                                                                             write_actual_version_to_history_file_mock,
                                                                             read_actual_version_from_history_file_mock,
-                                                                            hdp_versions_mock,
+                                                                            stack_versions_mock,
                                                                             put_structured_out_mock, allInstalledPackages_mock, list_ambari_managed_repos_mock):
     exists_mock.return_value = False
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [],  # before installation attempt
       []
     ]
@@ -810,13 +810,13 @@ class TestInstallPackages(RMFTestCase):
 
     self.assertFalse(write_actual_version_to_history_file_mock.called)
 
-    hdp_versions_mock.reset_mock()
+    stack_versions_mock.reset_mock()
     write_actual_version_to_history_file_mock.reset_mock()
     put_structured_out_mock.reset_mock()
 
     # Test retrying install again (correct build number, provided by other nodes, is now received from server)
 
-    hdp_versions_mock.side_effect = [
+    stack_versions_mock.side_effect = [
       [],  # before installation attempt
       []
     ]