
AMBARI-7884 HDP-2.2 stack should not hard code rpm version to 2.2.0.0 in cluster-env.xml (dsen)

Dmytro Sen committed 10 years ago · parent commit b2740331a5
41 changed files with 117 additions and 143 deletions
  1. +3 -19  ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py
  2. +3 -3   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
  3. +3 -3   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
  4. +3 -3   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
  5. +2 -2   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
  6. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
  7. +3 -3   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
  8. +3 -3   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
  9. +3 -3   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
  10. +4 -7  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
  11. +4 -5  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
  12. +3 -8  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
  13. +4 -7  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
  14. +3 -3  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
  15. +3 -3  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
  16. +3 -3  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
  17. +3 -3  ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
  18. +3 -3  ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py
  19. +5 -2  ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/package/scripts/params.py
  20. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml
  21. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml
  22. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
  23. +9 -9  ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
  24. +6 -6  ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
  25. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.2/services/KAFKA/metainfo.xml
  26. +3 -3  ambari-server/src/main/resources/stacks/HDP/2.2/services/KAFKA/package/scripts/params.py
  27. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.2/services/KNOX/metainfo.xml
  28. +4 -4  ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
  29. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml
  30. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/metainfo.xml
  31. +3 -3  ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py
  32. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml
  33. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml
  34. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml
  35. +6 -6  ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml
  36. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml
  37. +4 -4  ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
  38. +0 -0  ambari-server/src/test/python/stacks/2.1/configs/default-storm-start.json
  39. +0 -0  ambari-server/src/test/python/stacks/2.1/configs/secured-storm-start.json
  40. +1 -1  ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py
  41. +0 -1  ambari-server/src/test/python/stacks/2.2/configs/default.json
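
Nearly every params.py change below applies the same mechanical substitution: drop the rpm_version property that was read from cluster-env, and gate the /usr/hdp/current layout on the stack version Ambari already reports. The following is a minimal, self-contained sketch of the new pattern, not Ambari's actual module: a plain dict stands in for the object returned by Script.get_config(), and the two paths are examples taken from the diffs below.

# Sketch of the version-gating pattern this commit introduces.
# 'config' here is a hypothetical stand-in for Script.get_config().
def hadoop_bin_dir_for(config):
    hdp_stack_version = str(config['hostLevelParams']['stack_version'])
    stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0')
                                     or hdp_stack_version.startswith('2.1'))
    if stack_is_hdp22_or_further:
        return "/usr/hdp/current/hadoop-client/bin"  # versioned HDP 2.2+ layout
    return "/usr/bin"                                # legacy HDP 2.0/2.1 layout

# HDP 2.2 -> versioned /usr/hdp/current layout
assert hadoop_bin_dir_for({'hostLevelParams': {'stack_version': '2.2'}}) == "/usr/hdp/current/hadoop-client/bin"
# HDP 2.0.6 -> legacy layout
assert hadoop_bin_dir_for({'hostLevelParams': {'stack_version': '2.0.6'}}) == "/usr/bin"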

+ 3 - 19
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py

@@ -34,33 +34,18 @@ java_home = config['hostLevelParams']['java_home']
 
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 
-
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
-
-#hadoop params
-if rpm_version:
-  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
-else:
-  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 
 hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 versioned_hdp_root = '/usr/hdp/current'
-#security params
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-#java params
-java_home = config['hostLevelParams']['java_home']
+
 #hadoop params
 hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
 hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
 
-#hadoop-env.sh
-java_home = config['hostLevelParams']['java_home']
-
 if str(config['hostLevelParams']['stack_version']).startswith('2.0') and System.get_instance().os_family != "suse":
   # deprecated rhel jsvc_path
   jsvc_path = "/usr/libexec/bigtop-utils"
@@ -108,7 +93,6 @@ smoke_user =  config['configurations']['cluster-env']['smokeuser']
 gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
 gmond_user = config['configurations']['ganglia-env']["gmond_user"]
 
-user_group = config['configurations']['cluster-env']['user_group']
 proxyuser_group =  default("/configurations/hadoop-env/proxyuser_group","users")
 nagios_group = config['configurations']['nagios-env']['nagios_group']
 

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py

@@ -22,11 +22,11 @@ from resource_management.core.system import System
 
 config = Script.get_config()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #hadoop params
-if rpm_version:
+if stack_is_hdp22_or_further:
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
 else:

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py

@@ -21,10 +21,10 @@ from resource_management import *
 
 def setup_hdp_install_directory():
   import params
-  if params.rpm_version:
-    Execute(format('ambari-python-wrap /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^{rpm_version}- | tail -1`'),
+  if params.stack_is_hdp22_or_further:
+    Execute(format('ambari-python-wrap /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^{hdp_stack_version} | tail -1`'),
             not_if=format('test -d {versioned_hdp_root}'),
-            only_if=format('ls -d /usr/hdp/{rpm_version}-*')
+            only_if=format('ls -d /usr/hdp/{hdp_stack_version}*')
     )
 
 def setup_config():
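
For orientation, here is a small sketch of what the backquoted selector in the Execute above computes once rpm_version is gone: grep ^{hdp_stack_version} keeps every installed build of the running stack line, and tail -1 takes the last one. The version strings below are hypothetical stand-ins for real hdp-select versions output, and the sketch assumes that output is sorted ascending, which is what tail -1 relies on.

# Hypothetical output of `/usr/bin/hdp-select versions` (ascending order assumed).
installed = ["2.2.0.0-908", "2.2.0.0-1084", "2.2.1.0-17"]

hdp_stack_version = "2.2"  # str(config['hostLevelParams']['stack_version'])

# grep ^{hdp_stack_version}: keep only builds of the running stack line.
matching = [v for v in installed if v.startswith(hdp_stack_version)]
# tail -1: pick the last, i.e. newest, matching build.
newest = matching[-1] if matching else None

print(newest)  # 2.2.1.0-17 -- the value handed to `hdp-select set all`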

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py

@@ -32,11 +32,11 @@ java_home = config['hostLevelParams']['java_home']
 
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #hadoop params
-if rpm_version:
+if stack_is_hdp22_or_further:
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
   hadoop_home = "/usr/hdp/current/hadoop-client"

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py

@@ -25,8 +25,8 @@ import collections
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #users and groups
 hbase_user = config['configurations']['hbase-env']['hbase_user']

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -58,6 +58,6 @@ def setup_java():
 def install_packages():
   import params
   packages = ['unzip', 'curl']
-  if params.rpm_version:
+  if params.stack_is_hdp22_or_further:
     packages.append('hdp-select')
   Package(packages)

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py

@@ -23,11 +23,11 @@ import os
 
 config = Script.get_config()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #hadoop params
-if rpm_version:
+if stack_is_hdp22_or_further:
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
   hadoop_lib_home = "/usr/hdp/current/hadoop-client/lib"

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py

@@ -26,11 +26,11 @@ proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
 
 security_enabled = False
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #hadoop params
-if rpm_version:
+if stack_is_hdp22_or_further:
   flume_bin = '/usr/hdp/current/flume-client/bin/flume-ng'
 else:
   flume_bin = '/usr/bin/flume-ng'

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py

@@ -26,11 +26,11 @@ import status_params
 config = Script.get_config()
 exec_tmp_dir = Script.get_tmp_dir()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #hadoop params
-if rpm_version:
+if stack_is_hdp22_or_further:
   hadoop_bin_dir = format("/usr/hdp/current/hadoop-client/bin")
   daemon_script = format('/usr/hdp/current/hbase-client/bin/hbase-daemon.sh')
   region_mover = format('/usr/hdp/current/hbase-client/bin/region_mover.rb')

+ 4 - 7
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py

@@ -24,11 +24,11 @@ import os
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #hadoop params
-if rpm_version:
+if stack_is_hdp22_or_further:
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
   hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
@@ -210,11 +210,8 @@ hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
 
 #hadoop-env.sh
 java_home = config['hostLevelParams']['java_home']
-stack_version = str(config['hostLevelParams']['stack_version'])
 
-stack_is_hdp22_or_further = not (stack_version.startswith('2.0') or stack_version.startswith('2.1'))
-
-if stack_version.startswith('2.0') and System.get_instance().os_family != "suse":
+if hdp_stack_version.startswith('2.0') and System.get_instance().os_family != "suse":
   # deprecated rhel jsvc_path
   jsvc_path = "/usr/libexec/bigtop-utils"
 else:

+ 4 - 5
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py

@@ -26,15 +26,14 @@ import os
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
-hdp_stack_version = config['hostLevelParams']['stack_version']
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 # Hadoop params
-# TODO, this logic assumes that the existence of rpm_version => HDP version is 2.2 or greater.
+# TODO, this logic assumes the HDP version is 2.2 or greater.
 # Instead, it should initialize these parameters in a file inside the HDP 2.2 stack.
-if rpm_version:
+if stack_is_hdp22_or_further:
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   hadoop_streeming_jars = "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming-*.jar"

+ 3 - 8
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py

@@ -26,11 +26,11 @@ import os
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #hadoop params
-if rpm_version:
+if stack_is_hdp22_or_further:
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_lib_home = "/usr/hdp/current/hadoop-client/lib"
   oozie_lib_dir = "/usr/hdp/current/oozie-client/"
@@ -72,7 +72,6 @@ oozie_hdfs_user_dir = format("/user/{oozie_user}")
 oozie_pid_dir = status_params.oozie_pid_dir
 pid_file = status_params.pid_file
 hadoop_jar_location = "/usr/lib/hadoop/"
-hdp_stack_version = config['hostLevelParams']['stack_version']
 # for HDP1 it's "/usr/share/HDP-oozie/ext.zip"
 ext_js_path = "/usr/share/HDP-oozie/ext-2.2.zip"
 security_enabled = config['configurations']['cluster-env']['security_enabled']
@@ -94,7 +93,6 @@ oozie_log_dir = config['configurations']['oozie-env']['oozie_log_dir']
 oozie_data_dir = config['configurations']['oozie-env']['oozie_data_dir']
 oozie_server_port = get_port_from_url(config['configurations']['oozie-site']['oozie.base.url'])
 oozie_server_admin_port = config['configurations']['oozie-env']['oozie_admin_port']
-oozie_env_sh_template = config['configurations']['oozie-env']['content']
 fs_root = config['configurations']['core-site']['fs.defaultFS']
 
 if str(hdp_stack_version).startswith('2.0') or str(hdp_stack_version).startswith('2.1'):
@@ -125,10 +123,7 @@ if (('oozie-log4j' in config['configurations']) and ('content' in config['config
 else:
   log4j_props = None
 
-oozie_hdfs_user_dir = format("/user/{oozie_user}")
 oozie_hdfs_user_mode = 0775
-#for create_hdfs_directory
-hostname = config["hostname"]
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']

+ 4 - 7
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py

@@ -25,11 +25,11 @@ from resource_management import *
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #hadoop params
-if rpm_version:
+if stack_is_hdp22_or_further:
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'
@@ -54,7 +54,4 @@ java64_home = config['hostLevelParams']['java_home']
 
 pig_properties = config['configurations']['pig-properties']['content']
 
-log4j_props = config['configurations']['pig-log4j']['content']
-
-stack_version = str(config['hostLevelParams']['stack_version'])
-stack_is_hdp22_or_further = not (stack_version.startswith('2.0') or stack_version.startswith('2.1'))
+log4j_props = config['configurations']['pig-log4j']['content']

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py

@@ -21,11 +21,11 @@ from resource_management import *
 
 config = Script.get_config()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #hadoop params
-if rpm_version:
+if stack_is_hdp22_or_further:
   sqoop_conf_dir = '/etc/sqoop/conf'
   sqoop_lib = '/usr/hdp/current/sqoop-client/lib'
   hadoop_home = '/usr/hdp/current/hbase-client'

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py

@@ -27,11 +27,11 @@ import status_params
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #hadoop params
-if rpm_version:
+if stack_is_hdp22_or_further:
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
   hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py

@@ -26,11 +26,11 @@ import status_params
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #hadoop params
-if rpm_version:
+if stack_is_hdp22_or_further:
   zk_home = '/usr/hdp/current/zookeeper-client'
   zk_bin = '/usr/hdp/current/zookeeper-client/bin'
   smoke_script = '/usr/hdp/current/zookeeper-client/bin/zkCli.sh'

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py

@@ -23,11 +23,11 @@ from status_params import *
 
 config = Script.get_config()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #hadoop params
-if rpm_version:
+if stack_is_hdp22_or_further:
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   falcon_webapp_dir = "/usr/hdp/current/falcon-client/webapp"
   falcon_home = "/usr/hdp/current/falcon-client"

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py

@@ -24,11 +24,11 @@ import status_params
 # server configurations
 config = Script.get_config()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #hadoop params
-if rpm_version:
+if stack_is_hdp22_or_further:
   rest_lib_dir = '/usr/hdp/current/storm-client/contrib/storm-rest'
   storm_bin_dir = "/usr/hdp/current/storm-client/bin"
 else:

+ 5 - 2
ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/package/scripts/params.py

@@ -25,8 +25,11 @@ config = Script.get_config()
 
 # RPM versioning support
 rpm_version = default("/configurations/cluster-env/rpm_version", None)
-if rpm_version:
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
+
+if stack_is_hdp22_or_further:
+  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
 else:
   hadoop_bin_dir = "/usr/bin"
 hadoop_conf_dir = "/etc/hadoop/conf"

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml

@@ -27,7 +27,7 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>falcon_2_2_0_0_*</name>
+              <name>falcon_2_2_*</name>
             </package>
           </packages>
         </osSpecific>
@@ -35,7 +35,7 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>falcon-2-2-0-0-.*</name>
+              <name>falcon-2-2-.*</name>
             </package>
           </packages>
         </osSpecific>
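
The metainfo.xml hunks from here on repeat this exact substitution for every HDP 2.2 service: the yum/zypper glob loses the hard-coded 0_0 build segment, and the ubuntu12 pattern (a regex) loses -0-0-, so any 2.2.x.y package build satisfies the dependency rather than only 2.2.0.0. A quick check of the two pattern styles with Python's fnmatch and re modules, using invented package names for illustration:

import fnmatch
import re

rpm_names = ["falcon_2_2_0_0_908", "falcon_2_2_1_0_17"]
# Old glob matches only 2.2.0.0 builds; the new one matches any 2.2.x.y build.
print([n for n in rpm_names if fnmatch.fnmatch(n, "falcon_2_2_0_0_*")])  # ['falcon_2_2_0_0_908']
print([n for n in rpm_names if fnmatch.fnmatch(n, "falcon_2_2_*")])      # both names

deb_names = ["falcon-2-2-0-0-908", "falcon-2-2-1-0-17"]
print([n for n in deb_names if re.match(r"falcon-2-2-.*", n)])           # both names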

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml

@@ -28,7 +28,7 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>flume_2_2_0_0_*</name>
+              <name>flume_2_2_*</name>
             </package>
           </packages>
         </osSpecific>
@@ -36,7 +36,7 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>flume-2-2-0-0-.*</name>
+              <name>flume-2-2-.*</name>
             </package>
           </packages>
         </osSpecific>

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml

@@ -28,7 +28,7 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>hbase_2_2_0_0_*</name>
+              <name>hbase_2_2_*</name>
             </package>
           </packages>
         </osSpecific>
@@ -36,7 +36,7 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>hbase-2-2-0-0-.*</name>
+              <name>hbase-2-2-.*</name>
             </package>
           </packages>
         </osSpecific>

+ 9 - 9
ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml

@@ -37,7 +37,7 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>hadoop_2_2_0_0_*</name>
+              <name>hadoop_2_2_*</name>
             </package>
             <package>
               <name>snappy</name>
@@ -52,7 +52,7 @@
               <name>hadoop-lzo-native</name>
             </package>
             <package>
-              <name>hadoop_2_2_0_0_*-libhdfs</name>
+              <name>hadoop_2_2_*-libhdfs</name>
             </package>
             <package>
               <name>ambari-log4j</name>
@@ -64,22 +64,22 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>hadoop-2-2-0-0-.*-client</name>
+              <name>hadoop-2-2-.*-client</name>
             </package>
             <package>
-              <name>hadoop-2-2-0-0-.*-hdfs-datanode</name>
+              <name>hadoop-2-2-.*-hdfs-datanode</name>
             </package>
             <package>
-              <name>hadoop-2-2-0-0-.*-hdfs-journalnode</name>
+              <name>hadoop-2-2-.*-hdfs-journalnode</name>
             </package>
             <package>
-              <name>hadoop-2-2-0-0-.*-hdfs-namenode</name>
+              <name>hadoop-2-2-.*-hdfs-namenode</name>
             </package>
             <package>
-              <name>hadoop-2-2-0-0-.*-hdfs-secondarynamenode</name>
+              <name>hadoop-2-2-.*-hdfs-secondarynamenode</name>
             </package>
             <package>
-              <name>hadoop-2-2-0-0-.*-hdfs-zkfc</name>
+              <name>hadoop-2-2-.*-hdfs-zkfc</name>
             </package>
             <package>
               <name>libsnappy1</name>
@@ -91,7 +91,7 @@
               <name>liblzo2-2</name>
             </package>
             <package>
-              <name>libhdfs0-2-2-0-0-.*</name>
+              <name>libhdfs0-2-2-.*</name>
             </package>
           </packages>
         </osSpecific>

+ 6 - 6
ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml

@@ -34,13 +34,13 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>hive_2_2_0_0_*</name>
+              <name>hive_2_2_*</name>
             </package>
             <package>
-              <name>hive_2_2_0_0_*-hcatalog</name>
+              <name>hive_2_2_*-hcatalog</name>
             </package>
             <package>
-              <name>hive_2_2_0_0_*-webhcat</name>
+              <name>hive_2_2_*-webhcat</name>
             </package>
             <package>
               <name>mysql</name>
@@ -51,13 +51,13 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>hive-2-2-0-0-.*</name>
+              <name>hive-2-2-.*</name>
             </package>
             <package>
-              <name>hive-2-2-0-0-.*-hcatalog</name>
+              <name>hive-2-2-.*-hcatalog</name>
             </package>
             <package>
-              <name>hive-2-2-0-0-.*-webhcat</name>
+              <name>hive-2-2-.*-webhcat</name>
             </package>
           </packages>
         </osSpecific>

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/KAFKA/metainfo.xml

@@ -50,7 +50,7 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>kafka_2_2_0_0_*</name>
+              <name>kafka_2_2_*</name>
             </package>
           </packages>
         </osSpecific>
@@ -58,7 +58,7 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>kafka-2-2-0-0-.*</name>
+              <name>kafka-2-2-.*</name>
             </package>
           </packages>
         </osSpecific>

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.2/services/KAFKA/package/scripts/params.py

@@ -24,11 +24,11 @@ import status_params
 # server configurations
 config = Script.get_config()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 
-if rpm_version:
+if stack_is_hdp22_or_further:
     kafka_home = '/usr/hdp/current/kafka-broker/'
     kafka_bin = kafka_home+'bin/kafka'
 else:

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/KNOX/metainfo.xml

@@ -58,7 +58,7 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>knox_2_2_0_0_*</name>
+              <name>knox_2_2_*</name>
             </package>
           </packages>
         </osSpecific>
@@ -66,7 +66,7 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>knox-2-2-0-0-.*</name>
+              <name>knox-2-2-.*</name>
             </package>
           </packages>
         </osSpecific>

+ 4 - 4
ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml

@@ -40,10 +40,10 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>oozie_2_2_0_0_*</name>
+              <name>oozie_2_2_*</name>
             </package>
             <package>
-              <name>falcon_2_2_0_0_*</name>
+              <name>falcon_2_2_*</name>
             </package>
           </packages>
         </osSpecific>
@@ -51,10 +51,10 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>oozie-2-2-0-0-.*</name>
+              <name>oozie-2-2-.*</name>
             </package>
             <package>
-              <name>falcon-2-2-0-0-.*</name>
+              <name>falcon-2-2-.*</name>
             </package>
           </packages>
         </osSpecific>

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml

@@ -27,7 +27,7 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>pig_2_2_0_0_*</name>
+              <name>pig_2_2_*</name>
             </package>
           </packages>
         </osSpecific>
@@ -35,7 +35,7 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>pig-2-2-0-0-.*</name>
+              <name>pig-2-2-.*</name>
             </package>
           </packages>
         </osSpecific>

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/metainfo.xml

@@ -89,7 +89,7 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>slider_2_2_0_0_*</name>
+              <name>slider_2_2_*</name>
             </package>
           </packages>
         </osSpecific>
@@ -97,7 +97,7 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>slider-2-2-0-0-.*</name>
+              <name>slider-2-2-.*</name>
             </package>
           </packages>
         </osSpecific>

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py

@@ -23,11 +23,11 @@ from resource_management import *
 # server configurations
 config = Script.get_config()
 
-#RPM versioning support
-rpm_version = default("/configurations/cluster-env/rpm_version", None)
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+stack_is_hdp22_or_further = not (hdp_stack_version.startswith('2.0') or hdp_stack_version.startswith('2.1'))
 
 #hadoop params
-if rpm_version:
+if stack_is_hdp22_or_further:
   slider_bin_dir = '/usr/hdp/current/slider-client/bin'
 else:
   slider_bin_dir = "/usr/lib/slider/bin"

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml

@@ -34,7 +34,7 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>sqoop_2_2_0_0_*</name>
+              <name>sqoop_2_2_*</name>
             </package>
           </packages>
         </osSpecific>
@@ -42,7 +42,7 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>sqoop-2-2-0-0-.*</name>
+              <name>sqoop-2-2-.*</name>
             </package>
           </packages>
         </osSpecific>

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml

@@ -34,7 +34,7 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>storm_2_2_0_0_*</name>
+              <name>storm_2_2_*</name>
             </package>
           </packages>
         </osSpecific>
@@ -42,7 +42,7 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>storm-2-2-0-0-.*</name>
+              <name>storm-2-2-.*</name>
             </package>
           </packages>
         </osSpecific>

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml

@@ -28,7 +28,7 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>tez_2_2_0_0_*</name>
+              <name>tez_2_2_*</name>
             </package>
           </packages>
         </osSpecific>
@@ -36,7 +36,7 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>tez-2-2-0-0-.*</name>
+              <name>tez-2-2-.*</name>
             </package>
           </packages>
         </osSpecific>

+ 6 - 6
ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml

@@ -35,10 +35,10 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>hadoop_2_2_0_0_*-yarn</name>
+              <name>hadoop_2_2_*-yarn</name>
             </package>
             <package>
-              <name>hadoop_2_2_0_0_*-mapreduce</name>
+              <name>hadoop_2_2_*-mapreduce</name>
             </package>
           </packages>
         </osSpecific>
@@ -46,10 +46,10 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>hadoop-2-2-0-0-.*-yarn</name>
+              <name>hadoop-2-2-.*-yarn</name>
             </package>
             <package>
-              <name>hadoop-2-2-0-0-.*-mapreduce</name>
+              <name>hadoop-2-2-.*-mapreduce</name>
             </package>
           </packages>
         </osSpecific>
@@ -65,7 +65,7 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>hadoop_2_2_0_0_*-mapreduce</name>
+              <name>hadoop_2_2_*-mapreduce</name>
             </package>
           </packages>
         </osSpecific>
@@ -73,7 +73,7 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>hadoop-2-2-0-0-.*-mapreduce</name>
+              <name>hadoop-2-2-.*-mapreduce</name>
             </package>
           </packages>
         </osSpecific>

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml

@@ -28,7 +28,7 @@
           <osFamily>redhat5,redhat6,suse11</osFamily>
           <packages>
             <package>
-              <name>zookeeper_2_2_0_0_*</name>
+              <name>zookeeper_2_2_*</name>
             </package>
           </packages>
         </osSpecific>
@@ -36,7 +36,7 @@
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>zookeeper-2-2-0-0-.*</name>
+              <name>zookeeper-2-2-.*</name>
             </package>
           </packages>
         </osSpecific>

+ 4 - 4
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py

@@ -142,7 +142,7 @@ class TestDatanode(RMFTestCase):
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  su -s /bin/bash - root -c \'export HADOOP_SECURE_DN_PID_DIR=/var/run/hadoop/hdfs && export HADOOP_SECURE_DN_LOG_DIR=/var/log/hadoop/hdfs && export HADOOP_SECURE_DN_USER=hdfs && export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode\'',
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  su -s /bin/bash - root -c \'export HADOOP_SECURE_DN_PID_DIR=/var/run/hadoop/hdfs && export HADOOP_SECURE_DN_LOG_DIR=/var/log/hadoop/hdfs && export HADOOP_SECURE_DN_USER=hdfs && export HADOOP_LIBEXEC_DIR=/usr/hdp/current/hadoop-client/libexec && /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode\'',
                               not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               )
     self.assertNoMoreResources()
@@ -175,7 +175,7 @@ class TestDatanode(RMFTestCase):
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  su -s /bin/bash - hdfs -c \'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode\'',
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  su -s /bin/bash - hdfs -c \'export HADOOP_LIBEXEC_DIR=/usr/hdp/current/hadoop-client/libexec && /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode\'',
                               not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               )
     self.assertNoMoreResources()
@@ -233,7 +233,7 @@ class TestDatanode(RMFTestCase):
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  su -s /bin/bash - root -c \'export HADOOP_SECURE_DN_PID_DIR=/var/run/hadoop/hdfs && export HADOOP_SECURE_DN_LOG_DIR=/var/log/hadoop/hdfs && export HADOOP_SECURE_DN_USER=hdfs && export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode\'',
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  su -s /bin/bash - root -c \'export HADOOP_SECURE_DN_PID_DIR=/var/run/hadoop/hdfs && export HADOOP_SECURE_DN_LOG_DIR=/var/log/hadoop/hdfs && export HADOOP_SECURE_DN_USER=hdfs && export HADOOP_LIBEXEC_DIR=/usr/hdp/current/hadoop-client/libexec && /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode\'',
                               not_if = None,
                               )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
@@ -269,7 +269,7 @@ class TestDatanode(RMFTestCase):
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  su -s /bin/bash - hdfs -c \'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode\'',
+    self.assertResourceCalled('Execute', 'ulimit -c unlimited;  su -s /bin/bash - hdfs -c \'export HADOOP_LIBEXEC_DIR=/usr/hdp/current/hadoop-client/libexec && /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode\'',
                               not_if=None,
                               )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',

File diff suppressed because it is too large
+ 0 - 0
ambari-server/src/test/python/stacks/2.1/configs/default-storm-start.json


File diff suppressed because it is too large
+ 0 - 0
ambari-server/src/test/python/stacks/2.1/configs/secured-storm-start.json


+ 1 - 1
ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py

@@ -79,7 +79,7 @@ class TestSliderClient(RMFTestCase):
                        config_file="default.json"
     )
 
-    self.assertResourceCalled('Execute', ' /usr/hdp/current/slider-client/bin/slider list',
+    self.assertResourceCalled('Execute', ' /usr/lib/slider/bin/slider list',
                               logoutput=True,
                               tries=3,
                               user='ambari-qa',

+ 0 - 1
ambari-server/src/test/python/stacks/2.2/configs/default.json

@@ -45,7 +45,6 @@
             "yarn.resourcemanager.scheduler.address": "c6401.ambari.apache.org:8030"
         },
         "cluster-env": {
-            "rpm_version": "2.2.0.0",
             "security_enabled": "false",
             "ignore_groupsusers_create": "false",
             "smokeuser": "ambari-qa",

Some files were not shown because too many files changed in this diff