
AMBARI-8429 Refactor version comparisons to use function (dsen)

Dmytro Sen, 10 years ago
parent commit accbbf2735
28 changed files with 39 additions and 55 deletions
  1. + 2 - 1  ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py
  2. + 2 - 3  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
  3. + 1 - 1  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
  4. + 2 - 3  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
  5. + 0 - 1  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
  6. + 1 - 1  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
  7. + 2 - 3  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
  8. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
  9. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
  10. + 3 - 4  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
  11. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
  12. + 1 - 1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py
  13. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
  14. + 1 - 1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/templates/startHiveserver2.sh.j2
  15. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py
  16. + 3 - 4  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
  17. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
  18. + 1 - 1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/service_check.py
  19. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
  20. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
  21. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
  22. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
  23. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py
  24. + 1 - 1  ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/storm.py
  25. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/package/scripts/params.py
  26. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/2.2/services/KAFKA/package/scripts/params.py
  27. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/2.2/services/KNOX/package/scripts/params.py
  28. + 1 - 2  ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py
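The pattern across all 28 files is the same: fragile string-prefix checks such as `str(stack_version).startswith('2.0')` are replaced with explicit range checks built on `format_hdp_stack_version` and `compare_versions` from the `resource_management` library. For context, here is a minimal sketch of the behavior those two helpers are assumed to provide (simplified stand-ins, not the library's actual implementations), showing why the refactored check is safer:

```python
import re

def format_hdp_stack_version(value):
    # Assumed behavior: reduce a raw stack_version such as "HDP-2.2" or
    # "2.2.0.0" to its dotted numeric part; return "" if none is found.
    match = re.search(r'\d+(\.\d+)*', str(value))
    return match.group(0) if match else ""

def compare_versions(a, b):
    # Assumed behavior: compare dotted versions component-wise,
    # returning -1, 0, or 1. Shorter versions are zero-padded,
    # so "2.2" compares equal to "2.2.0".
    pa = [int(x) for x in a.split('.')]
    pb = [int(x) for x in b.split('.')]
    length = max(len(pa), len(pb))
    pa += [0] * (length - len(pa))
    pb += [0] * (length - len(pb))
    return (pa > pb) - (pa < pb)

# Before: a prefix test, which would also match a hypothetical "2.01"
# and says nothing about upper bounds:
#   str(stack_version).startswith('2.0')
# After: the explicit range check used throughout this commit:
hdp_stack_version = format_hdp_stack_version("2.0.6")
is_hdp20 = (hdp_stack_version != ""
            and compare_versions(hdp_stack_version, '2.0') >= 0
            and compare_versions(hdp_stack_version, '2.1') < 0)
print(is_hdp20)  # True
```

Note the guard `hdp_stack_version != ""` in every rewritten condition: `format_hdp_stack_version` can yield an empty string, and the commit consistently short-circuits on that case instead of comparing against it.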

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py

@@ -46,7 +46,8 @@ hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefi
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
 hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
 
-if str(config['hostLevelParams']['stack_version']).startswith('2.0') and System.get_instance().os_family != "suse":
+hdp_stack_version = format_hdp_stack_version(str(config['hostLevelParams']['stack_version']))
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.0') >= 0 and compare_versions(hdp_stack_version, '2.1') < 0 and System.get_instance().os_family != "suse":
   # deprecated rhel jsvc_path
   jsvc_path = "/usr/libexec/bigtop-utils"
 else:

+ 2 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py

@@ -25,10 +25,9 @@ config = Script.get_config()
 
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
 #hadoop params
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
 else:
@@ -47,7 +46,7 @@ hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefi
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
 hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
 
-if str(config['hostLevelParams']['stack_version']).startswith('2.0') and System.get_instance().os_family != "suse":
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.0') >= 0 and compare_versions(hdp_stack_version, '2.1') < 0 and System.get_instance().os_family != "suse":
   # deprecated rhel jsvc_path
   jsvc_path = "/usr/libexec/bigtop-utils"
 else:

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py

@@ -21,7 +21,7 @@ from resource_management import *
 
 def setup_hdp_install_directory():
   import params
-  if params.stack_is_hdp22_or_further:
+  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
     Execute(format('ambari-python-wrap /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^{hdp_stack_version} | tail -1`'),
             only_if=format('ls -d /usr/hdp/{hdp_stack_version}*')
     )

+ 2 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py

@@ -37,7 +37,6 @@ hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
 # Some datanode settings
 dfs_dn_addr = default('/configurations/hdfs-site/dfs.datanode.address', None)
@@ -68,7 +67,7 @@ def is_secure_port(port):
     return False
 
 #hadoop params
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
   hadoop_home = "/usr/hdp/current/hadoop-client"
@@ -107,7 +106,7 @@ hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefi
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
 hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
 
-if str(config['hostLevelParams']['stack_version']).startswith('2.0') and System.get_instance().os_family != "suse":
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.0') >= 0 and compare_versions(hdp_stack_version, '2.1') < 0 and System.get_instance().os_family != "suse":
   # deprecated rhel jsvc_path
   jsvc_path = "/usr/libexec/bigtop-utils"
 else:

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py

@@ -28,7 +28,6 @@ tmp_dir = Script.get_tmp_dir()
 
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
 #users and groups
 hbase_user = config['configurations']['hbase-env']['hbase_user']

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -58,6 +58,6 @@ def setup_java():
 def install_packages():
   import params
   packages = ['unzip', 'curl']
-  if params.stack_is_hdp22_or_further:
+  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
     packages.append('hdp-select')
   Package(packages)

+ 2 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py

@@ -26,10 +26,9 @@ config = Script.get_config()
 
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
 #hadoop params
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
   hadoop_lib_home = "/usr/hdp/current/hadoop-client/lib"
@@ -124,7 +123,7 @@ else:
 #hadoop-env.sh
 java_home = config['hostLevelParams']['java_home']
 
-if str(config['hostLevelParams']['stack_version']).startswith('2.0') and System.get_instance().os_family != "suse":
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.0') >= 0 and compare_versions(hdp_stack_version, '2.1') < 0 and System.get_instance().os_family != "suse":
   # deprecated rhel jsvc_path
   jsvc_path = "/usr/libexec/bigtop-utils"
 else:

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py

@@ -29,10 +29,9 @@ security_enabled = False
 
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
 #hadoop params
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   flume_bin = '/usr/hdp/current/flume-server/bin/flume-ng'
   flume_hive_home = '/usr/hdp/current/hive-metastore'
   flume_hcat_home = '/usr/hdp/current/hive-webhcat'

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py

@@ -29,10 +29,9 @@ exec_tmp_dir = Script.get_tmp_dir()
 
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
 #hadoop params
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   hadoop_bin_dir = format("/usr/hdp/current/hadoop-client/bin")
   daemon_script = format('/usr/hdp/current/hbase-client/bin/hbase-daemon.sh')
   region_mover = format('/usr/hdp/current/hbase-client/bin/region_mover.rb')

+ 3 - 4
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py

@@ -31,7 +31,6 @@ tmp_dir = Script.get_tmp_dir()
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 security_enabled = config['configurations']['cluster-env']['security_enabled']
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 hdfs_user = status_params.hdfs_user
 hadoop_pid_dir_prefix = status_params.hadoop_pid_dir_prefix
 
@@ -43,7 +42,7 @@ dfs_http_policy = default('/configurations/hdfs-site/dfs.http.policy', None)
 secure_dn_ports_are_in_use = False
 
 #hadoop params
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
   hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
@@ -241,7 +240,7 @@ lzo_packages_to_family = {
   "ubuntu": ["liblzo2-2"]
 }
 
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   lzo_packages_to_family["redhat"] += [format("hadooplzo_{underscorred_version}_*")]
   lzo_packages_to_family["suse"] += [format("hadooplzo_{underscorred_version}_*")]
   lzo_packages_to_family["ubuntu"] += [format("hadooplzo_{dashed_version}_*")]
@@ -261,7 +260,7 @@ hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
 #hadoop-env.sh
 java_home = config['hostLevelParams']['java_home']
 
-if hdp_stack_version.startswith('2.0') and System.get_instance().os_family != "suse":
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.0') >= 0 and compare_versions(hdp_stack_version, '2.1') < 0 and System.get_instance().os_family != "suse":
   # deprecated rhel jsvc_path
   jsvc_path = "/usr/libexec/bigtop-utils"
 else:

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py

@@ -52,12 +52,12 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
     hadoop_secure_dn_pid_file = format("{hadoop_secure_dn_pid_dir}/hadoop_secure_dn.pid")
 
     # At Champlain stack and further, we may start datanode as a non-root even in secure cluster
-    if not params.stack_is_hdp22_or_further or params.secure_dn_ports_are_in_use:
+    if not (params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0) or params.secure_dn_ports_are_in_use:
       user = "root"
       pid_file = format(
         "{hadoop_pid_dir_prefix}/{hdfs_user}/hadoop-{hdfs_user}-{name}.pid")
 
-    if action == 'stop' and params.stack_is_hdp22_or_further and \
+    if action == 'stop' and (params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0) and \
       os.path.isfile(hadoop_secure_dn_pid_file):
         # We need special handling for this case to handle the situation
         # when we configure non-root secure DN and then restart it

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py

@@ -33,7 +33,7 @@ class HiveServer(Script):
   def configure(self, env):
     import params
     env.set_params(params)
-    if not params.stack_is_hdp22_or_further:
+    if not (params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >=0):
       install_tez_jars()
     hive(name='hiveserver2')
 

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py

@@ -30,11 +30,11 @@ tmp_dir = Script.get_tmp_dir()
 # This is expected to be of the form #.#.#.#
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0
+stack_is_hdp21 = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.1') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0
 
 # Hadoop params
 # TODO, this logic should initialize these parameters in a file inside the HDP 2.2 stack.
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   hive_bin = '/usr/hdp/current/hive-client/bin'

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/templates/startHiveserver2.sh.j2

@@ -20,7 +20,7 @@
 #
 
 HIVE_SERVER2_OPTS=" -hiveconf hive.log.file=hiveserver2.log -hiveconf hive.log.dir=$5"
-{% if hive_authorization_enabled == True and str(hdp_stack_version).startswith('2.1') %}
+{% if hive_authorization_enabled == True and stack_is_hdp21 %}
 # HiveServer 2 -hiveconf options
 HIVE_SERVER2_OPTS="${HIVE_SERVER2_OPTS} -hiveconf hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator -hiveconf hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory "
 {% endif %}

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py

@@ -63,7 +63,7 @@ def oozie(is_server=False # TODO: see if see can remove this
       owner=params.oozie_user
     )
 
-  if params.stack_is_hdp22_or_further:
+  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
     File(format("{params.conf_dir}/adminusers.txt"),
       mode=0644,
       group=params.user_group,
@@ -162,7 +162,7 @@ def oozie_server_specific():
     not_if  = no_op_test
   )
 
-  if params.stack_is_hdp22_or_further:
+  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
     # Create hive-site and tez-site configs for oozie
     Directory(params.hive_conf_dir,
         recursive = True,

+ 3 - 4
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py

@@ -30,10 +30,9 @@ tmp_dir = Script.get_tmp_dir()
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
 #hadoop params
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_lib_home = "/usr/hdp/current/hadoop-client/lib"
   oozie_lib_dir = "/usr/hdp/current/oozie-client/"
@@ -102,7 +101,7 @@ oozie_server_port = get_port_from_url(config['configurations']['oozie-site']['oo
 oozie_server_admin_port = config['configurations']['oozie-env']['oozie_admin_port']
 fs_root = config['configurations']['core-site']['fs.defaultFS']
 
-if str(hdp_stack_version).startswith('2.0') or str(hdp_stack_version).startswith('2.1'):
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.0') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0:
   put_shared_lib_to_hdfs_cmd = format("hadoop --config {hadoop_conf_dir} dfs -put {oozie_shared_lib} {oozie_hdfs_user_dir}")
 # for newer
 else:
@@ -161,7 +160,7 @@ lzo_packages_to_family = {
   "ubuntu": ["liblzo2-2"]
 }
 
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   lzo_packages_to_family["redhat"] += [format("hadooplzo_{underscorred_version}_*")]
   lzo_packages_to_family["suse"] += [format("hadooplzo_{underscorred_version}_*")]
   lzo_packages_to_family["ubuntu"] += [format("hadooplzo_{dashed_version}_*")]

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py

@@ -28,10 +28,9 @@ tmp_dir = Script.get_tmp_dir()
 
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
 #hadoop params
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/service_check.py

@@ -66,7 +66,7 @@ class PigServiceCheck(Script):
       bin_dir = params.hadoop_bin_dir
     )
 
-    if params.stack_is_hdp22_or_further:
+    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
       # cleanup results from previous test
       ExecuteHadoop( create_file_cmd,
         tries     = 3,

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py

@@ -24,10 +24,9 @@ config = Script.get_config()
 
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
 #hadoop params
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   sqoop_conf_dir = '/etc/sqoop/conf'
   sqoop_lib = '/usr/hdp/current/sqoop-client/lib'
   hadoop_home = '/usr/hdp/current/hbase-client'

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py

@@ -30,10 +30,9 @@ tmp_dir = Script.get_tmp_dir()
 # This is expected to be of the form #.#.#.#
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
 #hadoop params
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
   hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py

@@ -29,10 +29,9 @@ tmp_dir = Script.get_tmp_dir()
 
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
 #hadoop params
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   zk_home = '/usr/hdp/current/zookeeper-client'
   zk_bin = '/usr/hdp/current/zookeeper-client/bin'
   smoke_script = '/usr/hdp/current/zookeeper-client/bin/zkCli.sh'

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py

@@ -26,10 +26,9 @@ config = Script.get_config()
 
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
 #hadoop params
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   falcon_webapp_dir = "/usr/hdp/current/falcon-client/webapp"
   falcon_home = "/usr/hdp/current/falcon-client"

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py

@@ -30,7 +30,7 @@ hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
 stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
 #hadoop params
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   rest_lib_dir = '/usr/hdp/current/storm-client/contrib/storm-rest'
   storm_bin_dir = "/usr/hdp/current/storm-client/bin"
 else:
@@ -69,7 +69,7 @@ if security_enabled:
   storm_jaas_principal = _storm_principal_name.replace('_HOST',_hostname_lowercase)
   storm_keytab_path = config['configurations']['storm-env']['storm_keytab']
   
-  if stack_is_hdp22_or_further:
+  if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
     storm_ui_keytab_path = config['configurations']['storm-env']['storm_ui_keytab']
     _storm_ui_jaas_principal_name = config['configurations']['storm-env']['storm_ui_principal_name']
     storm_ui_host = default("/clusterHostInfo/storm_ui_server_hosts", [])

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/storm.py

@@ -66,7 +66,7 @@ def storm():
     TemplateConfig(format("{conf_dir}/storm_jaas.conf"),
                    owner=params.storm_user
     )
-    if params.stack_is_hdp22_or_further:
+    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
       TemplateConfig(format("{conf_dir}/client_jaas.conf"),
                      owner=params.storm_user
       )

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/package/scripts/params.py

@@ -27,9 +27,8 @@ config = Script.get_config()
 # This is expected to be of the form #.#.#.#
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
 else:
   hadoop_bin_dir = "/usr/bin"

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/KAFKA/package/scripts/params.py

@@ -27,9 +27,8 @@ config = Script.get_config()
 
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
     kafka_home = '/usr/hdp/current/kafka-broker/'
     kafka_bin = kafka_home+'bin/kafka'
 else:

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/KNOX/package/scripts/params.py

@@ -28,9 +28,8 @@ tmp_dir = Script.get_tmp_dir()
 
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   knox_bin = '/usr/hdp/current/knox-server/bin/gateway.sh'
   ldap_bin = '/usr/hdp/current/knox-server/bin/ldap.sh'
   knox_client_bin = '/usr/hdp/current/knox-server/bin/knoxcli.sh'

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py

@@ -26,10 +26,9 @@ config = Script.get_config()
 
 hdp_stack_version = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
 
 #hadoop params
-if stack_is_hdp22_or_further:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   slider_bin_dir = '/usr/hdp/current/slider-client/bin'
 else:
   slider_bin_dir = "/usr/lib/slider/bin"