
AMBARI-11049. Some Hadoop Directory Parameters Are Wrong On Running Processes After Upgrade (ncole)

Nate Cole authored 10 years ago
commit 3da48c232f
17 changed files with 149 additions and 54 deletions
  1. ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py (+47 -10)
  2. ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py (+1 -2)
  3. ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py (+3 -2)
  4. ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py (+1 -2)
  5. ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py (+3 -6)
  6. ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py (+1 -1)
  7. ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py (+1 -0)
  8. ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py (+1 -1)
  9. ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py (+2 -6)
  10. ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py (+1 -2)
  11. ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py (+1 -1)
  12. ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py (+1 -2)
  13. ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py (+3 -7)
  14. ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py (+3 -4)
  15. ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py (+1 -2)
  16. ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py (+4 -6)
  17. ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py (+75 -0)
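
The per-service diffs below all apply the same fix: hard-coded Hadoop directory defaults and their HDP 2.2+ overrides are replaced by a single lookup through conf_select, so the paths track the stack version and any in-flight upgrade instead of being frozen when the params script is parsed. A minimal before/after sketch of that pattern, condensed from the params scripts changed in this commit:

# Before: the path is picked once per stack line; a process restarted during
# an upgrade can still end up pointing at the wrong directory.
hadoop_bin_dir = "/usr/bin"
if Script.is_hdp_stack_greater_or_equal("2.2"):
  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"

# After: resolved centrally by the new conf_select helper.
from resource_management.libraries.functions import conf_select
hadoop_bin_dir = conf_select.get_hadoop_dir("bin")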

+ 47 - 10
ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py

@@ -18,13 +18,21 @@ limitations under the License.
 
 """
 
-__all__ = ["select", "create"]
+__all__ = ["select", "create", "get_hadoop_conf_dir", "get_hadoop_dir"]
 
 import version
 from resource_management.core import shell
+from resource_management.core.exceptions import Fail
 from resource_management.libraries.script.script import Script
 
 TEMPLATE = "conf-select {0} --package {1} --stack-version {2} --conf-version 0"
+HADOOP_DIR_TEMPLATE = "/usr/hdp/{0}/{1}/{2}"
+HADOOP_DIR_DEFAULTS = {
+  "libexec": "/usr/lib/hadoop/libexec",
+  "sbin": "/usr/lib/hadoop/sbin",
+  "bin": "/usr/bin",
+  "lib": "/usr/lib/hadoop/lib"
+}
 
 def _valid(stack_name, package, ver):
   if stack_name != "HDP":
@@ -35,6 +43,17 @@ def _valid(stack_name, package, ver):
 
   return True
 
+def _is_upgrade():
+  from resource_management.libraries.functions.default import default
+  direction = default("/commandParams/upgrade_direction", None)
+  stack_name = default("/hostLevelParams/stack_name", None)
+  ver = default("/commandParams/version", None)
+
+  if direction and stack_name and ver:
+    return (stack_name, ver)
+
+  return None
+
 def create(stack_name, package, version):
   """
   Creates a config version for the specified package
@@ -76,22 +95,40 @@ def get_hadoop_conf_dir():
       the configs are written in the correct place
   """
 
-  config = Script.get_config()
   hadoop_conf_dir = "/etc/hadoop/conf"
 
   if Script.is_hdp_stack_greater_or_equal("2.2"):
-    from resource_management.libraries.functions.default import default
-
     hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
 
-    direction = default("/commandParams/upgrade_direction", None)
-    ver = default("/commandParams/version", None)
-    stack_name = default("/hostLevelParams/stack_name", None)
+    res = _is_upgrade()
 
-    if direction and ver and stack_name and Script.is_hdp_stack_greater_or_equal("2.3"):
-      select(stack_name, "hadoop", ver)
-      hadoop_conf_dir = "/usr/hdp/{0}/hadoop/conf".format(ver)
+    if res is not None and Script.is_hdp_stack_greater_or_equal("2.3"):
+      select(res[0], "hadoop", res[1])
+      hadoop_conf_dir = "/usr/hdp/{0}/hadoop/conf".format(res[1])
 
   return hadoop_conf_dir
 
+def get_hadoop_dir(target):
+  """
+  Return the hadoop shared directory in the following override order
+  1. Use default for 2.1 and lower
+  2. If 2.2 and higher, use /usr/hdp/current/hadoop-client/{target}
+  3. If 2.2 and higher AND for an upgrade, use /usr/hdp/<version>/hadoop/{target}
+  :target: the target directory
+  """
+
+  if not target in HADOOP_DIR_DEFAULTS:
+    raise Fail("Target {0} not defined".format(target))
+
+  hadoop_dir = HADOOP_DIR_DEFAULTS[target]
+
+  if Script.is_hdp_stack_greater_or_equal("2.2"):
+    hadoop_dir = HADOOP_DIR_TEMPLATE.format("current", "hadoop-client", target)
+
+    res = _is_upgrade()
+
+    if res is not None:
+      hadoop_dir = HADOOP_DIR_TEMPLATE.format(res[1], "hadoop", target)
+
+  return hadoop_dir
 
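A sketch of how the two conf_select helpers above resolve directories; the resolution order mirrors the get_hadoop_dir docstring, and the upgrade case relies on _is_upgrade() reading /commandParams/upgrade_direction, /commandParams/version and /hostLevelParams/stack_name from the command JSON:

from resource_management.libraries.functions import conf_select

# get_hadoop_dir("bin") resolves, per the code above, to:
#   HDP 2.1 and lower          -> "/usr/bin"                            (HADOOP_DIR_DEFAULTS)
#   HDP 2.2 and higher         -> "/usr/hdp/current/hadoop-client/bin"  (HADOOP_DIR_TEMPLATE)
#   HDP 2.2+ during an upgrade -> "/usr/hdp/<version>/hadoop/bin"
hadoop_bin_dir = conf_select.get_hadoop_dir("bin")

# get_hadoop_conf_dir() follows the same idea, but only switches to the
# versioned directory on HDP 2.3+ (after running conf-select via select()):
#   HDP 2.1 and lower          -> "/etc/hadoop/conf"
#   HDP 2.2 and higher         -> "/usr/hdp/current/hadoop-client/conf"
#   HDP 2.3+ during an upgrade -> "/usr/hdp/<version>/hadoop/conf"
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()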

+ 1 - 2
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py

@@ -46,8 +46,7 @@ conf_dir = status_params.conf_dir
 server_conf_dir = status_params.server_conf_dir
 
 # service locations
-hadoop_prefix = "/usr/hdp/current/hadoop-client"
-hadoop_bin_dir = format("{hadoop_prefix}/bin")
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 zookeeper_home = "/usr/hdp/current/zookeeper-client"
 
 # the configuration direction for HDFS/YARN/MapR is the hadoop config

+ 3 - 2
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py

@@ -18,6 +18,7 @@ limitations under the License.
 """
 import status_params
 
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
@@ -37,8 +38,9 @@ hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 etc_prefix_dir = "/etc/falcon"
 
 # hadoop params
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
 
   # if this is a server action, then use the server binaries; smoke tests
   # use the client binaries
@@ -53,7 +55,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   falcon_webapp_dir = format('/usr/hdp/current/{falcon_root}/webapp')
   falcon_home = format('/usr/hdp/current/{falcon_root}')
 else:
-  hadoop_bin_dir = "/usr/bin"
   falcon_webapp_dir = '/var/lib/falcon/webapp'
   falcon_home = '/usr/lib/falcon'
 

+ 1 - 2
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py

@@ -48,7 +48,7 @@ stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 # hadoop default parameters
-hadoop_bin_dir = "/usr/bin"
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
 region_mover = "/usr/lib/hbase/bin/region_mover.rb"
@@ -57,7 +57,6 @@ hbase_cmd = "/usr/lib/hbase/bin/hbase"
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
-  hadoop_bin_dir = format("/usr/hdp/current/hadoop-client/bin")
   daemon_script = format('/usr/hdp/current/hbase-client/bin/hbase-daemon.sh')
   region_mover = format('/usr/hdp/current/hbase-client/bin/region_mover.rb')
   region_drainer = format('/usr/hdp/current/hbase-client/bin/draining_servers.rb')

+ 3 - 6
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py

@@ -62,9 +62,9 @@ secure_dn_ports_are_in_use = False
 
 # hadoop default parameters
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-hadoop_bin = "/usr/lib/hadoop/sbin"
-hadoop_bin_dir = "/usr/bin"
+hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")
+hadoop_bin = conf_select.get_hadoop_dir("sbin")
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 hadoop_home = "/usr/lib/hadoop"
 hadoop_secure_dn_user = hdfs_user
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
@@ -72,9 +72,6 @@ hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
-  hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = "/usr/hdp/current/hadoop-client"
 
   if not security_enabled:

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py

@@ -71,6 +71,7 @@ webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
 # use the directories from status_params as they are already calculated for
 # the correct version of HDP
 hadoop_conf_dir = status_params.hadoop_conf_dir
+hadoop_bin_dir = status_params.hadoop_bin_dir
 webhcat_conf_dir = status_params.webhcat_conf_dir
 hive_conf_dir = status_params.hive_conf_dir
 hive_config_dir = status_params.hive_config_dir
@@ -87,7 +88,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   hive_specific_configs_supported = True
 
   component_directory = status_params.component_directory
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   hive_bin = format('/usr/hdp/current/{component_directory}/bin')
   hive_lib = format('/usr/hdp/current/{component_directory}/lib')

+ 1 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py

@@ -71,6 +71,7 @@ else:
 
   # default configuration directories
   hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+  hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
  webhcat_conf_dir = '/etc/hive-webhcat/conf'
   hive_etc_dir_prefix = "/etc/hive"
   hive_conf_dir = "/etc/hive/conf"

+ 1 - 1
ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py

@@ -45,7 +45,7 @@ mahout_conf_dir = "/usr/hdp/current/mahout-client/conf"
 mahout_user = config['configurations']['mahout-env']['mahout_user']
 
 #hadoop params
-hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 hadoop_home = '/usr/hdp/current/hadoop-client'
 
 # the configuration direction for HDFS/YARN/MapR is the hadoop config

+ 2 - 6
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py

@@ -47,13 +47,11 @@ stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
+hadoop_lib_home = conf_select.get_hadoop_dir("lib")
 
 #hadoop params
 if Script.is_hdp_stack_greater_or_equal("2.2"):
-  # start out assuming client libraries
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
-  hadoop_lib_home = "/usr/hdp/current/hadoop-client/lib"
-
   # oozie-server or oozie-client, depending on role
   oozie_root = status_params.component_directory
 
@@ -74,8 +72,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   hive_conf_dir = format("{conf_dir}/action-conf/hive")
 
 else:
-  hadoop_bin_dir = "/usr/bin"
-  hadoop_lib_home = "/usr/lib/hadoop/lib"
   oozie_lib_dir = "/var/lib/oozie"
   oozie_setup_sh = "/usr/lib/oozie/bin/oozie-setup.sh"
   oozie_webapps_dir = "/var/lib/oozie/oozie-server/webapps/"

+ 1 - 2
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py

@@ -40,15 +40,14 @@ version = default("/commandParams/version", None)
 
 # hadoop default parameters
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 pig_conf_dir = "/etc/pig/conf"
-hadoop_bin_dir = "/usr/bin"
 hadoop_home = '/usr'
 pig_bin_dir = ""
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   pig_conf_dir = "/usr/hdp/current/pig-client/conf"
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'
 

+ 1 - 1
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py

@@ -61,10 +61,10 @@ version = default("/commandParams/version", None)
 
 spark_conf = '/etc/spark/conf'
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_home = "/usr/hdp/current/hadoop-client"
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   spark_conf = format("/usr/hdp/current/{component_directory}/conf")
   spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
   spark_pid_dir = status_params.spark_pid_dir

+ 1 - 2
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py

@@ -42,7 +42,7 @@ version = default("/commandParams/version", None)
 
 # default hadoop parameters
 hadoop_home = '/usr'
-hadoop_bin_dir = "/usr/bin"
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 tez_etc_dir = "/etc/tez"
 config_dir = "/etc/tez/conf"
@@ -50,7 +50,6 @@ path_to_tez_examples_jar = "/usr/lib/tez/tez-mapreduce-examples*.jar"
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   path_to_tez_examples_jar = "/usr/hdp/{hdp_version}/tez/tez-examples*.jar"
 
 # tez only started linking /usr/hdp/x.x.x.x/tez-client/conf in HDP 2.3+

+ 3 - 7
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py

@@ -60,9 +60,9 @@ version = default("/commandParams/version", None)
 hostname = config['hostname']
 
 # hadoop default parameters
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-hadoop_bin = "/usr/lib/hadoop/sbin"
-hadoop_bin_dir = "/usr/bin"
+hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")
+hadoop_bin = conf_select.get_hadoop_dir("sbin")
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_yarn_home = '/usr/lib/hadoop-yarn'
 hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
@@ -84,10 +84,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   if command_role in YARN_SERVER_ROLE_DIRECTORY_MAP:
     yarn_role_root = YARN_SERVER_ROLE_DIRECTORY_MAP[command_role]
 
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
-  hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
-
   hadoop_mapred2_jar_location = format("/usr/hdp/current/{mapred_role_root}")
   mapred_bin = format("/usr/hdp/current/{mapred_role_root}/sbin")
 

+ 3 - 4
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py

@@ -18,6 +18,7 @@ limitations under the License.
 """
 
 from ambari_commons.constants import AMBARI_SUDO_BINARY
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
 from resource_management import *
 from resource_management.core.system import System
@@ -31,15 +32,13 @@ hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 # default hadoop params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 
 # HDP 2.2+ params
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
-  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
 
   # not supported in HDP 2.2+
   hadoop_conf_empty_dir = None

+ 1 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py

@@ -72,18 +72,17 @@ def is_secure_port(port):
 
 # hadoop default params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 hadoop_home = "/usr/lib/hadoop"
 hadoop_secure_dn_user = hdfs_user
 hadoop_dir = "/etc/hadoop"
 versioned_hdp_root = '/usr/hdp/current'
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 
 # HDP 2.2+ params
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
   hadoop_home = "/usr/hdp/current/hadoop-client"
 
   # not supported in HDP 2.2+

+ 4 - 6
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py

@@ -31,9 +31,10 @@ hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 # hadoop default params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-hadoop_lib_home = "/usr/lib/hadoop/lib"
-hadoop_bin = "/usr/lib/hadoop/sbin"
+
+hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")
+hadoop_lib_home = conf_select.get_hadoop_dir("lib")
+hadoop_bin = conf_select.get_hadoop_dir("sbin")
 hadoop_home = '/usr'
 create_lib_snappy_symlinks = True
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
@@ -42,9 +43,6 @@ default_topology_script_file_path = "/etc/hadoop/conf/topology_script.py"
 # HDP 2.2+ params
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
-  hadoop_lib_home = "/usr/hdp/current/hadoop-client/lib"
-  hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   create_lib_snappy_symlinks = False
 

+ 75 - 0
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py

@@ -1193,6 +1193,81 @@ class TestNamenode(RMFTestCase):
                               )
     self.assertNoMoreResources()
 
+  @patch("resource_management.core.shell.call")
+  def test_pre_rolling_restart_21_and_lower_params(self, call_mock):
+    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
+    with open(config_file, "r") as f:
+      json_content = json.load(f)
+    json_content['hostLevelParams']['stack_name'] = 'HDP'
+    json_content['hostLevelParams']['stack_version'] = '2.0'
+
+    mocks_dict = {}
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "pre_rolling_restart",
+                       config_dict = json_content,
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES,
+                       call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
+                       mocks_dict = mocks_dict)
+    import sys
+    self.assertEquals("/etc/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
+    self.assertEquals("/usr/lib/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
+    self.assertEquals("/usr/bin", sys.modules["params"].hadoop_bin_dir)
+    self.assertEquals("/usr/lib/hadoop/sbin", sys.modules["params"].hadoop_bin)
+
+  @patch("resource_management.core.shell.call")
+  def test_pre_rolling_restart_22_params(self, call_mock):
+    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
+    with open(config_file, "r") as f:
+      json_content = json.load(f)
+    version = '2.2.0.0-1234'
+    del json_content['commandParams']['version']
+    json_content['hostLevelParams']['stack_name'] = 'HDP'
+    json_content['hostLevelParams']['stack_version'] = '2.2'
+
+    mocks_dict = {}
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "pre_rolling_restart",
+                       config_dict = json_content,
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES,
+                       call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
+                       mocks_dict = mocks_dict)
+    import sys
+    self.assertEquals("/usr/hdp/current/hadoop-client/conf", sys.modules["params"].hadoop_conf_dir)
+    self.assertEquals("/usr/hdp/current/hadoop-client/libexec", sys.modules["params"].hadoop_libexec_dir)
+    self.assertEquals("/usr/hdp/current/hadoop-client/bin", sys.modules["params"].hadoop_bin_dir)
+    self.assertEquals("/usr/hdp/current/hadoop-client/sbin", sys.modules["params"].hadoop_bin)
+
+  @patch("resource_management.core.shell.call")
+  def test_pre_rolling_restart_23_params(self, call_mock):
+    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
+    with open(config_file, "r") as f:
+      json_content = json.load(f)
+    version = '2.3.0.0-1234'
+    json_content['commandParams']['version'] = version
+    json_content['commandParams']['upgrade_direction'] = 'upgrade'
+    json_content['hostLevelParams']['stack_name'] = 'HDP'
+    json_content['hostLevelParams']['stack_version'] = '2.3'
+
+    mocks_dict = {}
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "pre_rolling_restart",
+                       config_dict = json_content,
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES,
+                       call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
+                       mocks_dict = mocks_dict)
+    import sys
+    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
+    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
+    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/bin", sys.modules["params"].hadoop_bin_dir)
+    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/sbin", sys.modules["params"].hadoop_bin)
+
+
 
 class Popen_Mock:
   return_value = 1