
AMBARI-7709 Use hdp-select rpm to align on versioned RPMs for the stack. (dsen)

Dmytro Sen · 10 years ago
commit c5495dd879
26 changed files with 103 additions and 94 deletions
  1. + 3 - 3
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
  2. + 2 - 4
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
  3. + 3 - 3
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
  4. + 3 - 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
  5. + 5 - 1
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
  6. + 5 - 5
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
  7. + 1 - 1
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
  8. + 5 - 5
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
  9. + 4 - 4
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
  10. + 14 - 14
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
  11. + 1 - 1
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh
  12. + 12 - 12
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
  13. + 3 - 3
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
  14. + 3 - 3
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/configuration/sqoop-env.xml
  15. + 9 - 7
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
  16. + 2 - 1
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/sqoop.py
  17. + 8 - 8
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
  18. + 3 - 3
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
  19. + 3 - 3
      ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
  20. + 2 - 2
      ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py
  21. + 3 - 3
      ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
  22. + 1 - 1
      ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py
  23. + 4 - 4
      ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml
  24. + 2 - 2
      ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
  25. + 1 - 1
      ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
  26. + 1 - 0
      ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py

@@ -27,8 +27,8 @@ rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
 if rpm_version:
-  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
+  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
 else:
   mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
@@ -73,4 +73,4 @@ hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 user_group = config['configurations']['cluster-env']['user_group']
 
 namenode_host = default("/clusterHostInfo/namenode_host", [])
-has_namenode = not len(namenode_host) == 0
+has_namenode = not len(namenode_host) == 0

+ 2 - 4
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py

@@ -22,9 +22,7 @@ from resource_management import *
 def setup_hdp_install_directory():
   import params
   if params.rpm_version:
-    Execute(format('ln -s /usr/hdp/{rpm_version}-* {versioned_hdp_root}'),
-            not_if=format('ls {versioned_hdp_root}'),
-            only_if=format('ls -d /usr/hdp/{rpm_version}-*')
+    Execute(format('hdp-select set all `hdp-select versions | grep ^{rpm_version}- | tail -1`')
     )
 
 def setup_config():
@@ -36,4 +34,4 @@ def setup_config():
               configuration_attributes=params.config['configuration_attributes']['core-site'],
               owner=params.hdfs_user,
               group=params.user_group
-  )
+    )
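
Note: this replaces the hand-rolled symlinking with hdp-select. A minimal sketch of what the new Execute line resolves to, assuming a hypothetical rpm_version of 2.2.0.0 and two installed builds (the real call runs the backticked pipeline as a single shell command):

    # Hypothetical output of `hdp-select versions`:
    #   2.2.0.0-1084
    #   2.2.0.0-2041
    # `grep ^2.2.0.0-` keeps only builds of the requested stack version,
    # `tail -1` picks the last (newest) one, and `hdp-select set all <build>`
    # repoints the /usr/hdp/current/<component> symlinks at /usr/hdp/<build>.
    from resource_management import *

    rpm_version = "2.2.0.0"  # hypothetical; normally read from cluster-env
    Execute(format('hdp-select set all `hdp-select versions | grep ^{rpm_version}- | tail -1`'))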

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py

@@ -37,9 +37,9 @@ rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
 if rpm_version:
-  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
-  hadoop_home = "/usr/hdp/current/hadoop"
+  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
+  hadoop_home = "/usr/hdp/current/hadoop-client"
 else:
   mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"

+ 3 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py

@@ -25,6 +25,9 @@ import collections
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+#RPM versioning support
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
+
 #users and groups
 hbase_user = config['configurations']['hbase-env']['hbase_user']
 nagios_user = config['configurations']['nagios-env']['nagios_user']

+ 5 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -56,4 +56,8 @@ def setup_java():
   )
 
 def install_packages():
-  Package(['unzip', 'curl'])
+  import params
+  packages = ['unzip', 'curl']
+  if params.rpm_version:
+    packages.append('hdp-select')
+  Package(packages)

+ 5 - 5
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py

@@ -28,11 +28,11 @@ rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
 if rpm_version:
-  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
-  hadoop_lib_home = "/usr/hdp/current/hadoop/lib"
-  hadoop_bin = "/usr/hdp/current/hadoop/sbin"
-  hadoop_home = '/usr/hdp/current/hadoop'
+  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
+  hadoop_lib_home = "/usr/hdp/current/hadoop-client/lib"
+  hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
+  hadoop_home = '/usr/hdp/current/hadoop-client'
 else:
   mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py

@@ -31,7 +31,7 @@ rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
 if rpm_version:
-  flume_bin = '/usr/hdp/current/flume/bin/flume-ng'
+  flume_bin = '/usr/hdp/current/flume-client/bin/flume-ng'
 else:
   flume_bin = '/usr/bin/flume-ng'
 

+ 5 - 5
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py

@@ -31,11 +31,11 @@ rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
 if rpm_version:
-  hadoop_bin_dir = format("/usr/hdp/current/hadoop/bin")
-  daemon_script = format('/usr/hdp/current/hbase/bin/hbase-daemon.sh')
-  region_mover = format('/usr/hdp/current/hbase/bin/region_mover.rb')
-  region_drainer = format('/usr/hdp/current/hbase/bin/draining_servers.rb')
-  hbase_cmd = format('/usr/hdp/current/hbase/bin/hbase')
+  hadoop_bin_dir = format("/usr/hdp/current/hadoop-client/bin")
+  daemon_script = format('/usr/hdp/current/hbase-client/bin/hbase-daemon.sh')
+  region_mover = format('/usr/hdp/current/hbase-client/bin/region_mover.rb')
+  region_drainer = format('/usr/hdp/current/hbase-client/bin/draining_servers.rb')
+  hbase_cmd = format('/usr/hdp/current/hbase-client/bin/hbase')
 else:
   hadoop_bin_dir = "/usr/bin"
   daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"

+ 4 - 4
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py

@@ -29,10 +29,10 @@ rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
 if rpm_version:
-  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
-  hadoop_bin = "/usr/hdp/current/hadoop/sbin"
-  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
+  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
+  hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
+  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
 else:
   mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"

+ 14 - 14
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py

@@ -33,20 +33,20 @@ hdp_stack_version = config['hostLevelParams']['stack_version']
 
 #hadoop params
 if rpm_version:
-  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
-  hadoop_home = '/usr/hdp/current/hadoop'
-  hadoop_streeming_jars = "/usr/hdp/current/hadoop-mapreduce/hadoop-streaming-*.jar"
-  hive_bin = '/usr/hdp/current/hive/bin'
-  hive_lib = '/usr/hdp/current/hive/lib'
-  tez_local_api_jars = '/usr/hdp/current/tez/tez*.jar'
-  tez_local_lib_jars = '/usr/hdp/current/tez/lib/*.jar'
-  tez_tar_file = "/usr/hdp/current/tez/lib/tez*.tar.gz"
-  pig_tar_file = '/usr/hdp/current/pig/pig.tar.gz'
-  hive_tar_file = '/usr/hdp/current/hive/hive.tar.gz'
-  sqoop_tar_file = '/usr/hdp/current/sqoop/sqoop*.tar.gz'
-
-  hcat_lib = '/usr/hdp/current/hive/hive-hcatalog/share/hcatalog'
-  webhcat_bin_dir = '/usr/hdp/current/hive-hcatalog/sbin'
+  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+  hadoop_home = '/usr/hdp/current/hadoop-client'
+  hadoop_streeming_jars = "/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming-*.jar"
+  hive_bin = '/usr/hdp/current/hive-client/bin'
+  hive_lib = '/usr/hdp/current/hive-client/lib'
+  tez_local_api_jars = '/usr/hdp/current/tez-client/tez*.jar'
+  tez_local_lib_jars = '/usr/hdp/current/tez-client/lib/*.jar'
+  tez_tar_file = "/usr/hdp/current/tez-client/lib/tez*.tar.gz"
+  pig_tar_file = '/usr/hdp/current/pig-client/pig.tar.gz'
+  hive_tar_file = '/usr/hdp/current/hive-client/hive.tar.gz'
+  sqoop_tar_file = '/usr/hdp/current/sqoop-client/sqoop*.tar.gz'
+
+  hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
+  webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
 
 else:
   hadoop_bin_dir = "/usr/bin"

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh

@@ -80,7 +80,7 @@ fi
 
 export OOZIE_EXAMPLES_DIR=`$LIST_PACKAGE_FILES_CMD oozie-client | grep 'oozie-examples.tar.gz$' | xargs dirname`
 if [[ -z "$OOZIE_EXAMPLES_DIR" ]] ; then
-  export OOZIE_EXAMPLES_DIR='/usr/hdp/current/oozie/doc/'
+  export OOZIE_EXAMPLES_DIR='/usr/hdp/current/oozie-client/doc/'
 fi
 cd $OOZIE_EXAMPLES_DIR
 

+ 12 - 12
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py

@@ -31,18 +31,18 @@ rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
 if rpm_version:
-  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
-  hadoop_lib_home = "/usr/hdp/current/hadoop/lib"
-  oozie_lib_dir = "/usr/hdp/current/oozie/"
-  oozie_setup_sh = "/usr/hdp/current/oozie/bin/oozie-setup.sh"
-  oozie_webapps_dir = "/usr/hdp/current/oozie/oozie-server/webapps"
-  oozie_webapps_conf_dir = "/usr/hdp/current/oozie/oozie-server/conf"
-  oozie_libext_dir = "/usr/hdp/current/oozie/libext"
-  oozie_server_dir = "/usr/hdp/current/oozie/oozie-server"
-  oozie_shared_lib = "/usr/hdp/current/oozie/share"
-  oozie_home = "/usr/hdp/current/oozie"
-  oozie_bin_dir = "/usr/hdp/current/oozie/bin"
-  falcon_home = '/usr/hdp/current/falcon'
+  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+  hadoop_lib_home = "/usr/hdp/current/hadoop-client/lib"
+  oozie_lib_dir = "/usr/hdp/current/oozie-client/"
+  oozie_setup_sh = "/usr/hdp/current/oozie-client/bin/oozie-setup.sh"
+  oozie_webapps_dir = "/usr/hdp/current/oozie-client/oozie-server/webapps"
+  oozie_webapps_conf_dir = "/usr/hdp/current/oozie-client/oozie-server/conf"
+  oozie_libext_dir = "/usr/hdp/current/oozie-client/libext"
+  oozie_server_dir = "/usr/hdp/current/oozie-client/oozie-server"
+  oozie_shared_lib = "/usr/hdp/current/oozie-client/share"
+  oozie_home = "/usr/hdp/current/oozie-client"
+  oozie_bin_dir = "/usr/hdp/current/oozie-client/bin"
+  falcon_home = '/usr/hdp/current/falcon-client'
 else:
   hadoop_bin_dir = "/usr/bin"
   hadoop_lib_home = "/usr/lib/hadoop/lib"

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py

@@ -30,9 +30,9 @@ rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
 if rpm_version:
-  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
-  hadoop_home = '/usr/hdp/current/hadoop'
-  pig_bin_dir = '/usr/hdp/current/pig/bin'
+  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+  hadoop_home = '/usr/hdp/current/hadoop-client'
+  pig_bin_dir = '/usr/hdp/current/pig-client/bin'
 else:
   hadoop_bin_dir = "/usr/bin"
   hadoop_home = '/usr'

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/configuration/sqoop-env.xml

@@ -30,13 +30,13 @@
 
 #Set path to where bin/hadoop is available
 #Set path to where bin/hadoop is available
-export HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}
+export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
 
 #set the path to where bin/hbase is available
-export HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}
+export HBASE_HOME=${HBASE_HOME:-{{hbase_home}}}
 
 #Set the path to where bin/hive is available
-export HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}
+export HIVE_HOME=${HIVE_HOME:-{{hive_home}}}
 
 #Set the path for where zookeper config dir is
 export ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}
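
Note: the {{hadoop_home}}, {{hbase_home}}, and {{hive_home}} tokens above are Jinja2-style placeholders that the resource_management library fills in from the service's params module when it writes sqoop-env.sh. A minimal sketch of that substitution, using a hypothetical value and a shortened one-line template:

    from resource_management.core.source import InlineTemplate

    # Hypothetical value, mirroring the SQOOP params.py change below.
    hadoop_home = '/usr/hdp/current/hadoop-client'

    template = 'export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}'
    # InlineTemplate renders the Jinja2 expression against the given kwargs.
    print(InlineTemplate(template, hadoop_home=hadoop_home).get_content())
    # -> export HADOOP_HOME=${HADOOP_HOME:-/usr/hdp/current/hadoop-client}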

+ 9 - 7
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py

@@ -26,16 +26,18 @@ rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
 if rpm_version:
-  sqoop_conf_dir = '/usr/hdp/current/etc/sqoop/conf'
-  sqoop_lib = '/usr/hdp/current/sqoop/lib'
-  hbase_home = '/usr/hdp/current/hbase'
-  hive_home = '/usr/hdp/current/hive'
-  sqoop_bin_dir = '/usr/hdp/current/sqoop/bin/'
+  sqoop_conf_dir = '/etc/sqoop/conf'
+  sqoop_lib = '/usr/hdp/current/sqoop-client/lib'
+  hadoop_home = '/usr/hdp/current/hadoop-client'
+  hbase_home = '/usr/hdp/current/hbase-client'
+  hive_home = '/usr/hdp/current/hive-client'
+  sqoop_bin_dir = '/usr/hdp/current/sqoop-client/bin/'
 else:
   sqoop_conf_dir = "/usr/lib/sqoop/conf"
   sqoop_lib = "/usr/lib/sqoop/lib"
-  hbase_home = "/usr"
-  hive_home = "/usr"
+  hadoop_home = '/usr/lib/hadoop'
+  hbase_home = "/usr/lib/hbase"
+  hive_home = "/usr/lib/hive"
   sqoop_bin_dir = "/usr/bin"
 
 zoo_conf_dir = "/etc/zookeeper"

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/sqoop.py

@@ -27,7 +27,8 @@ def sqoop(type=None):
   ) 
   Directory(params.sqoop_conf_dir,
             owner = params.sqoop_user,
-            group = params.user_group
+            group = params.user_group,
+            recursive = True
   )
   
   File(format("{sqoop_conf_dir}/sqoop-env.sh"),

+ 8 - 8
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py

@@ -32,14 +32,14 @@ rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
 if rpm_version:
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
-  hadoop_bin = "/usr/hdp/current/hadoop/sbin"
-  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
-  hadoop_yarn_home = '/usr/hdp/current/hadoop-yarn'
-  hadoop_mapred2_jar_location = '/usr/hdp/current/hadoop-mapreduce'
-  mapred_bin = '/usr/hdp/current/hadoop-mapreduce/sbin'
-  yarn_bin = '/usr/hdp/current/hadoop-yarn/sbin'
-  yarn_container_bin = '/usr/hdp/current/hadoop-yarn/bin'
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
+  hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
+  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+  hadoop_yarn_home = '/usr/hdp/current/hadoop-yarn-client'
+  hadoop_mapred2_jar_location = '/usr/hdp/current/hadoop-mapreduce-client'
+  mapred_bin = '/usr/hdp/current/hadoop-mapreduce-client/sbin'
+  yarn_bin = '/usr/hdp/current/hadoop-yarn-client/sbin'
+  yarn_container_bin = '/usr/hdp/current/hadoop-yarn-client/bin'
 else:
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
   hadoop_bin = "/usr/lib/hadoop/sbin"

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py

@@ -31,9 +31,9 @@ rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
 if rpm_version:
-  zk_home = '/usr/hdp/current/zookeeper'
-  zk_bin = '/usr/hdp/current/zookeeper/bin'
-  smoke_script = '/usr/hdp/current/zookeeper/bin/zkCli.sh'
+  zk_home = '/usr/hdp/current/zookeeper-client'
+  zk_bin = '/usr/hdp/current/zookeeper-client/bin'
+  smoke_script = '/usr/hdp/current/zookeeper-client/bin/zkCli.sh'
 else:
   zk_home = '/usr'
   zk_bin = '/usr/lib/zookeeper/bin'

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py

@@ -28,9 +28,9 @@ rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
 if rpm_version:
-  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
-  falcon_webapp_dir = "/usr/hdp/current/falcon/webapp"
-  falcon_home = "/usr/hdp/current/falcon"
+  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+  falcon_webapp_dir = "/usr/hdp/current/falcon-client/webapp"
+  falcon_home = "/usr/hdp/current/falcon-client"
 else:
   hadoop_bin_dir = "/usr/bin"
   falcon_webapp_dir = '/var/lib/falcon/webapp'

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py

@@ -29,8 +29,8 @@ rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
 if rpm_version:
-  rest_lib_dir = '/usr/hdp/current/storm/contrib/storm-rest'
-  storm_bin_dir = "/usr/hdp/current/storm/bin"
+  rest_lib_dir = '/usr/hdp/current/storm-client/contrib/storm-rest'
+  storm_bin_dir = "/usr/hdp/current/storm-client/bin"
 else:
   rest_lib_dir = "/usr/lib/storm/contrib/storm-rest"
   storm_bin_dir = "/usr/bin"

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml

@@ -31,21 +31,21 @@ limitations under the License.
 
   <property>
     <name>templeton.libjars</name>
-    <value>/usr/hdp/current/zookeeper/zookeeper.jar</value>
+    <value>/usr/hdp/current/zookeeper-client/zookeeper.jar</value>
     <description>Jars to add the the classpath.</description>
   </property>
 
 
   <property>
     <name>templeton.hadoop</name>
-    <value>/usr/hdp/current/hadoop/bin/hadoop</value>
+    <value>/usr/hdp/current/hadoop-client/bin/hadoop</value>
     <description>The path to the Hadoop executable.</description>
   </property>
 
 
   <property>
     <name>templeton.hcat</name>
-    <value>/usr/hdp/current/hive/bin/hcat</value>
+    <value>/usr/hdp/current/hive-client/bin/hcat</value>
     <description>The path to the hcatalog executable.</description>
   </property>
 

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py

@@ -28,7 +28,7 @@ rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
 if rpm_version:
-  slider_bin_dir = '/usr/hdp/current/slider/bin'
+  slider_bin_dir = '/usr/hdp/current/slider-client/bin'
 else:
   slider_bin_dir = "/usr/lib/slider/bin"
 

+ 4 - 4
ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml

@@ -42,7 +42,7 @@
 
   <property>
     <name>java.library.path</name>
-    <value>/usr/local/lib:/opt/local/lib:/usr/lib:/usr/hdp/current/storm/lib</value>
+    <value>/usr/local/lib:/opt/local/lib:/usr/lib:/usr/hdp/current/storm-client/lib</value>
     <description>This value is passed to spawned JVMs (e.g., Nimbus, Supervisor, and Workers)
       for the java.library.path value. java.library.path tells the JVM where
       to look for native libraries. It is necessary to set this config correctly since
@@ -51,13 +51,13 @@
 
   <property>
     <name>nimbus.childopts</name>
-    <value>-Xmx1024m _JAAS_PLACEHOLDER -javaagent:/usr/hdp/current/storm/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8649,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Nimbus_JVM</value>
+    <value>-Xmx1024m _JAAS_PLACEHOLDER -javaagent:/usr/hdp/current/storm-nimbus/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8649,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-nimbus/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Nimbus_JVM</value>
     <description>This parameter is used by the storm-deploy project to configure the jvm options for the nimbus daemon.</description>
   </property>
 
   <property>
     <name>worker.childopts</name>
-    <value>-Xmx768m _JAAS_PLACEHOLDER -javaagent:/usr/hdp/current/storm/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Worker_%ID%_JVM</value>
+    <value>-Xmx768m _JAAS_PLACEHOLDER -javaagent:/usr/hdp/current/storm-client/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-client/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Worker_%ID%_JVM</value>
     <description>The jvm opts provided to workers launched by this supervisor. All \"%ID%\" substrings are replaced with an identifier for this worker.</description>
   </property>
 
@@ -69,7 +69,7 @@
 
   <property>
     <name>supervisor.childopts</name>
-    <value>-Xmx256m _JAAS_PLACEHOLDER -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=56431 -javaagent:/usr/hdp/current/storm/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Supervisor_JVM</value>
+    <value>-Xmx256m _JAAS_PLACEHOLDER -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=56431 -javaagent:/usr/hdp/current/storm-supervisor/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-supervisor/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Supervisor_JVM</value>
     <description>This parameter is used by the storm-deploy project to configure the jvm options for the supervisor daemon.</description>
   </property>
   

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml

@@ -24,7 +24,7 @@
 
   <property>
     <name>mapreduce.admin.user.env</name>
-    <value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/hdp/current/hadoop/lib/native/Linux-amd64-64</value>
+    <value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/hdp/current/hadoop-client/lib/native/Linux-amd64-64</value>
     <description>
       Additional execution environment entries for map and reduce task processes.
       This is not an additive property. You must preserve the original value if
@@ -34,7 +34,7 @@
 
   <property>
     <name>mapreduce.application.classpath</name>
-    <value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*,/usr/hdp/current/hadoop-mapreduce/,/usr/hdp/current/hadoop-mapreduce/lib,/usr/hdp/current/hadoop/</value>
+    <value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*,/usr/hdp/current/hadoop-mapreduce-client/,/usr/hdp/current/hadoop-mapreduce-client/lib,/usr/hdp/current/hadoop-client/</value>
     <description>
       CLASSPATH for MR applications. A comma-separated list of CLASSPATH
       entries.

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml

@@ -23,7 +23,7 @@
 
   <property>
     <name>yarn.application.classpath</name>
-    <value>/etc/hadoop/conf,/usr/hdp/current/hadoop/*,/usr/hdp/current/hadoop/lib/*,/usr/hdp/current/hadoop-hdfs/*,/usr/hdp/current/hadoop-hdfs/lib/*,/usr/hdp/current/hadoop-yarn/*,/usr/hdp/current/hadoop-yarn/lib/*,/usr/hdp/current/hadoop-mapreduce/*,/usr/hdp/current/hadoop-mapreduce/lib/*</value>
+    <value>/etc/hadoop/conf,/usr/hdp/current/hadoop-client/*,/usr/hdp/current/hadoop-client/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*,/usr/hdp/current/hadoop-mapreduce-client/*,/usr/hdp/current/hadoop-mapreduce-client/lib/*</value>
     <description>Classpath for typical applications.</description>
   </property>
 

+ 1 - 0
ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py

@@ -31,6 +31,7 @@ class TestSqoop(RMFTestCase):
     self.assertResourceCalled('Link', '/usr/lib/sqoop/lib/mysql-connector-java.jar',
                               to = '/usr/share/java/mysql-connector-java.jar',)
     self.assertResourceCalled('Directory', '/usr/lib/sqoop/conf',
+                              recursive = True,
                               owner = 'sqoop',
                               group = 'hadoop',)
     self.assertResourceCalled('File', '/usr/lib/sqoop/conf/sqoop-env.sh',