
AMBARI-7407 Move service configs to /etc/ for versioned RPMs (dsen)

Dmytro Sen 10 years ago
commit d7ed431620
65 changed files with 249 additions and 356 deletions
  1. +4 -4   ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
  2. +6 -6   ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
  3. +2 -1   ambari-agent/src/test/python/resource_management/TestExecuteResource.py
  4. +4 -8   ambari-common/src/main/python/resource_management/core/providers/system.py
  5. +15 -9   ambari-common/src/main/python/resource_management/core/shell.py
  6. +4 -0   ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
  7. +1 -5   ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
  8. +1 -5   ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
  9. +1 -0   ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
  10. +5 -6   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
  11. +4 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
  12. +6 -7   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
  13. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
  14. +2 -4   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
  15. +7 -9   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
  16. +8 -10   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
  17. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py
  18. +2 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
  19. +4 -6   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
  20. +2 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
  21. +28 -42   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
  22. +4 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
  23. +5 -7   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
  24. +1 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/pig.py
  25. +5 -6   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
  26. +4 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/mapred_service_check.py
  27. +11 -13   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
  28. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py
  29. +3 -4   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
  30. +5 -7   ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
  31. +10 -1   ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py
  32. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml
  33. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml
  34. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
  35. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
  36. +0 -34   ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml
  37. +2 -2   ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
  38. +4 -10   ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
  39. +3 -3   ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
  40. +0 -13   ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
  41. +3 -3   ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
  42. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml
  43. +2 -2   ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py
  44. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml
  45. +0 -29   ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-env.xml
  46. +3 -3   ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml
  47. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml
  48. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml
  49. +0 -43   ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml
  50. +10 -1   ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
  51. +3 -8   ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
  52. +3 -3   ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml
  53. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml
  54. +1 -1   ambari-server/src/test/python/stacks/1.3.2/HDFS/test_service_check.py
  55. +2 -2   ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
  56. +1 -1   ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
  57. +4 -0   ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
  58. +4 -5   ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
  59. +7 -5   ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
  60. +7 -10   ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
  61. +8 -0   ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
  62. +2 -0   ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
  63. +8 -0   ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
  64. +2 -2   ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_service_check.py
  65. +4 -4   ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py

+ 4 - 4
ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py

@@ -36,10 +36,10 @@ class TestCopyFromLocal(TestCase):
       call_arg_list = execute_hadoop_mock.call_args_list
       self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
                        call_arg_list[0][0][0].command)
-      self.assertEquals({'not_if': "su - user1 -c ' hadoop fs -ls /apps/test//*.files' >/dev/null 2>&1", 'user': 'user1', 'conf_dir': '/etc/hadoop/conf'},
+      self.assertEquals({'not_if': "su - user1 -c ' hadoop fs -ls /apps/test//*.files' >/dev/null 2>&1", 'user': 'user1', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'},
                         call_arg_list[0][0][0].arguments)
       self.assertEquals('fs -chown user1 /apps/test//*.files', call_arg_list[1][0][0].command)
-      self.assertEquals({'user': 'hdfs', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
+      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
 
 
   @patch("resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider")
@@ -57,9 +57,9 @@ class TestCopyFromLocal(TestCase):
       call_arg_list = execute_hadoop_mock.call_args_list
       self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
                        call_arg_list[0][0][0].command)
-      self.assertEquals({'not_if': "su - user1 -c ' hadoop fs -ls /apps/test//*.files' >/dev/null 2>&1", 'user': 'user1', 'conf_dir': '/etc/hadoop/conf'},
+      self.assertEquals({'not_if': "su - user1 -c ' hadoop fs -ls /apps/test//*.files' >/dev/null 2>&1", 'user': 'user1', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'},
                         call_arg_list[0][0][0].arguments)
       self.assertEquals('fs -chown user1:hdfs /apps/test//*.files', call_arg_list[1][0][0].command)
-      self.assertEquals({'user': 'hdfs', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
+      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
 
 

+ 6 - 6
ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py

@@ -43,7 +43,7 @@ class TestExecuteHadoopResource(TestCase):
                         'tries': 1,
                         'user': 'user',
                         'try_sleep': 0,
-                        'environment': {'PATH': os.environ['PATH']}})
+                        'path': [None]})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")
@@ -67,7 +67,7 @@ class TestExecuteHadoopResource(TestCase):
                         'tries': 1,
                         'user': 'user',
                         'try_sleep': 0,
-                        'environment': {'PATH': os.environ['PATH']}})
+                        'path': [None]})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")
@@ -96,7 +96,7 @@ class TestExecuteHadoopResource(TestCase):
                         'tries': 2,
                         'user': 'user',
                         'try_sleep': 2,
-                        'environment': {'PATH': os.environ['PATH']}})
+                        'path': [None]})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")
@@ -122,13 +122,13 @@ class TestExecuteHadoopResource(TestCase):
                         'tries': 1,
                         'user': 'user',
                         'try_sleep': 0,
-                        'environment': {'PATH': os.environ['PATH']}})
+                        'path': [None]})
       self.assertEqual(execute_mock.call_args_list[1][0][0].arguments,
                        {'logoutput': False,
                         'tries': 1,
                         'user': 'user',
                         'try_sleep': 0,
-                        'environment': {'PATH': os.environ['PATH']}})
+                        'path': [None]})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")
@@ -181,7 +181,7 @@ class TestExecuteHadoopResource(TestCase):
                         'tries': 1,
                         'user': 'user',
                         'try_sleep': 0,
-                        'environment': {'PATH': os.environ['PATH']}})
+                        'path': [None]})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")

+ 2 - 1
ambari-agent/src/test/python/resource_management/TestExecuteResource.py

@@ -90,7 +90,8 @@ class TestExecuteResource(TestCase):
       execute_resource = Execute('echo "1"',
                                  path=["/test/one", "test/two"]
       )
-    self.assertEqual(execute_resource.environment["PATH"], '/test/one:test/two')
+    expected_command = 'export PATH=$PATH:/test/one:test/two ; echo "1"'
+    self.assertEqual(popen_mock.call_args_list[0][0][0][3], expected_command)
 
   @patch('time.sleep')
   @patch.object(subprocess, "Popen")

+ 4 - 8
ambari-common/src/main/python/resource_management/core/providers/system.py

@@ -222,19 +222,15 @@ class ExecuteProvider(Provider):
         return
 
     Logger.debug("Executing %s" % self.resource)
-    
-    if self.resource.path != []:
-      if not self.resource.environment:
-        self.resource.environment = {}
-      
-      self.resource.environment['PATH'] = os.pathsep.join(self.resource.path) 
-    
+
     for i in range (0, self.resource.tries):
       try:
         shell.checked_call(self.resource.command, logoutput=self.resource.logoutput,
                             cwd=self.resource.cwd, env=self.resource.environment,
                             preexec_fn=_preexec_fn(self.resource), user=self.resource.user,
-                            wait_for_finish=self.resource.wait_for_finish, timeout=self.resource.timeout)
+                            wait_for_finish=self.resource.wait_for_finish,
+                            timeout=self.resource.timeout,
+                            path=self.resource.path)
         break
       except Fail as ex:
         if i == self.resource.tries-1: # last try

+ 15 - 9
ambari-common/src/main/python/resource_management/core/shell.py

@@ -19,6 +19,7 @@ limitations under the License.
 Ambari Agent
 
 """
+import os
 
 __all__ = ["checked_call", "call", "quote_bash_args"]
 
@@ -31,15 +32,15 @@ from exceptions import ExecuteTimeoutException
 from resource_management.core.logger import Logger
 
 def checked_call(command, logoutput=False, 
-         cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, timeout=None):
-  return _call(command, logoutput, True, cwd, env, preexec_fn, user, wait_for_finish, timeout)
+         cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, timeout=None, path=None):
+  return _call(command, logoutput, True, cwd, env, preexec_fn, user, wait_for_finish, timeout, path)
 
 def call(command, logoutput=False, 
-         cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, timeout=None):
-  return _call(command, logoutput, False, cwd, env, preexec_fn, user, wait_for_finish, timeout)
+         cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, timeout=None, path=None):
+  return _call(command, logoutput, False, cwd, env, preexec_fn, user, wait_for_finish, timeout, path)
             
 def _call(command, logoutput=False, throw_on_failure=True, 
-         cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, timeout=None):
+         cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, timeout=None, path=None):
   """
   Execute shell command
   
@@ -54,12 +55,17 @@ def _call(command, logoutput=False, throw_on_failure=True,
   if isinstance(command, (list, tuple)):
     command = ' '.join(quote_bash_args(x) for x in command)
 
+  if path:
+    export_path_command = "export PATH=$PATH" + os.pathsep + os.pathsep.join(path) + " ; "
+  else:
+    export_path_command = ""
+
   if user:
-    command = ["su", "-", user, "-c", command]
+    subprocess_command = ["su", "-", user, "-c", export_path_command + command]
   else:
-    command = ["/bin/bash","--login","-c", command]
+    subprocess_command = ["/bin/bash","--login","-c", export_path_command + command]
 
-  proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+  proc = subprocess.Popen(subprocess_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                           cwd=cwd, env=env, shell=False,
                           preexec_fn=preexec_fn)
 
@@ -86,7 +92,7 @@ def _call(command, logoutput=False, throw_on_failure=True,
     Logger.info(out)
   
   if throw_on_failure and code:
-    err_msg = Logger.get_protected_text(("Execution of '%s' returned %d. %s") % (command[-1], code, out))
+    err_msg = Logger.get_protected_text(("Execution of '%s' returned %d. %s") % (command, code, out))
     raise Fail(err_msg)
   
   return code, out
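
For reference, a minimal standalone sketch of the new PATH handling in _call() (not part of the diff; it only restates the logic added above so it can be tried in isolation):

  import os

  def build_subprocess_command(command, user=None, path=None):
      # Same idea as the change to _call(): optionally prepend an export that
      # appends the given directories to PATH, then wrap the command for su/bash.
      export_path_command = ""
      if path:
          export_path_command = "export PATH=$PATH" + os.pathsep + os.pathsep.join(path) + " ; "
      if user:
          return ["su", "-", user, "-c", export_path_command + command]
      return ["/bin/bash", "--login", "-c", export_path_command + command]

  # build_subprocess_command('hadoop fs -ls /', user='hdfs', path=['/usr/hdp/current/hadoop/bin'])
  # -> ['su', '-', 'hdfs', '-c', 'export PATH=$PATH:/usr/hdp/current/hadoop/bin ; hadoop fs -ls /']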

+ 4 - 0
ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py

@@ -34,6 +34,7 @@ class CopyFromLocalProvider(Provider):
     mode = self.resource.mode
     hdfs_usr=self.resource.hdfs_user
     hadoop_conf_path = self.resource.hadoop_conf_dir
+    bin_dir = self.resource.hadoop_bin_dir
 
 
     if dest_file:
@@ -50,6 +51,7 @@ class CopyFromLocalProvider(Provider):
     ExecuteHadoop(copy_cmd,
                   not_if=unless_cmd,
                   user=owner,
+                  bin_dir=bin_dir,
                   conf_dir=hadoop_conf_path
                   )
 
@@ -66,6 +68,7 @@ class CopyFromLocalProvider(Provider):
 
       ExecuteHadoop(chown_cmd,
                     user=hdfs_usr,
+                    bin_dir=bin_dir,
                     conf_dir=hadoop_conf_path)
     pass
 
@@ -75,5 +78,6 @@ class CopyFromLocalProvider(Provider):
 
       ExecuteHadoop(chmod_cmd,
                     user=hdfs_usr,
+                    bin_dir=bin_dir,
                     conf_dir=hadoop_conf_path)
     pass

+ 1 - 5
ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py

@@ -28,7 +28,6 @@ class ExecuteHadoopProvider(Provider):
     kinit__path_local = self.resource.kinit_path_local
     keytab = self.resource.keytab
     conf_dir = self.resource.conf_dir
-    bin_dir = self.resource.bin_dir
     command = self.resource.command
     principal = self.resource.principal
     
@@ -42,14 +41,11 @@ class ExecuteHadoopProvider(Provider):
           user = self.resource.user
         )
 
-      path = os.environ['PATH']
-      if bin_dir is not None:
-        path += os.pathsep + bin_dir
 
       Execute (format("hadoop --config {conf_dir} {command}"),
         user        = self.resource.user,
         tries       = self.resource.tries,
         try_sleep   = self.resource.try_sleep,
         logoutput   = self.resource.logoutput,
-        environment = {'PATH' : path}
+        path        = self.resource.bin_dir
       )
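
Illustrative usage only (not part of the commit): with the change above, a stack script can pass a versioned bin directory to ExecuteHadoop, and the provider simply forwards it to Execute as path. The user and paths below are placeholders:

  from resource_management import *

  # Hypothetical smoke-test call; bin_dir ends up as Execute(path=...)
  ExecuteHadoop('fs -ls /tmp',
                user='hdfs',
                conf_dir='/etc/hadoop/conf',
                bin_dir='/usr/hdp/current/hadoop/bin')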

+ 1 - 5
ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py

@@ -99,15 +99,11 @@ class HdfsDirectoryProvider(Provider):
     #for hadoop 2 we need to specify -p to create directories recursively
     parent_flag = '`rpm -q hadoop | grep -q "hadoop-1" || echo "-p"`'
 
-    path = os.environ['PATH']
-    if bin_dir is not None:
-      path += os.pathsep + bin_dir
-
     Execute(format('hadoop --config {hdp_conf_dir} fs -mkdir {parent_flag} {dir_list_str} && {chmod_cmd} && {chown_cmd}',
                    chmod_cmd=' && '.join(chmod_commands),
                    chown_cmd=' && '.join(chown_commands)),
             user=hdp_hdfs_user,
-            environment = {'PATH' : path},
+            path=bin_dir,
             not_if=format("su - {hdp_hdfs_user} -c 'export PATH=$PATH:{bin_dir} ; "
                           "hadoop --config {hdp_conf_dir} fs -ls {dir_list_str}'")
     )

+ 1 - 0
ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py

@@ -35,5 +35,6 @@ class CopyFromLocal(Resource):
   kinnit_if_needed = ResourceArgument(default='')
   hadoop_conf_dir = ResourceArgument(default='/etc/hadoop/conf')
   hdfs_user = ResourceArgument(default='hdfs')
+  hadoop_bin_dir = ResourceArgument(default='/usr/bin')
 
   actions = Resource.actions + ["run"]
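
A hedged usage sketch (not from the diff) showing the new argument next to the existing ones; the local file, users, and destination are placeholders:

  from resource_management import *

  CopyFromLocal('/tmp/sample.jar',
                owner='hdfs',
                mode=0755,
                dest_dir='/apps/test',
                kinnit_if_needed='',
                hdfs_user='hdfs',
                hadoop_bin_dir='/usr/hdp/current/hadoop/bin',  # new argument; defaults to /usr/bin
                hadoop_conf_dir='/etc/hadoop/conf')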

+ 5 - 6
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py

@@ -27,16 +27,15 @@ rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  hadoop_conf_empty_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf.empty")
-  mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
-  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
+  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
-  hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
   mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 
+hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+versioned_hdp_root = '/usr/hdp/current'
 #security params
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 #java params

+ 4 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py

@@ -39,6 +39,10 @@ def setup_hadoop_env():
          owner=tc_owner,
          content=InlineTemplate(params.hadoop_env_sh_template)
     )
+    if params.rpm_version is not None:
+      Execute(format('ln -s /usr/hdp/{rpm_version}* {versioned_hdp_root}'),
+              not_if=format('ls {versioned_hdp_root}')
+      )
 
 def setup_config():
   import params
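
As a rough illustration (assumed values, not from the commit): with rpm_version set to the placeholder 2.9.9.9 used by the HDP 2.2 stack configs below, and versioned_hdp_root = '/usr/hdp/current' from the updated params.py, the Execute added in this hook resolves to roughly:

  # Resolved form of the Execute above, assuming rpm_version = '2.9.9.9'
  Execute('ln -s /usr/hdp/2.9.9.9* /usr/hdp/current',
          not_if='ls /usr/hdp/current')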

+ 6 - 7
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py

@@ -28,20 +28,19 @@ rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
-  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
-  hadoop_lib_home = format("/usr/hdp/{rpm_version}/hadoop/lib")
-  hadoop_bin = format("/usr/hdp/{rpm_version}/hadoop/sbin")
-  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
+  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
+  hadoop_lib_home = "/usr/hdp/current/hadoop/lib"
+  hadoop_bin = "/usr/hdp/current/hadoop/sbin"
+  hadoop_home = '/usr/hdp/current/hadoop'
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
   mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
   hadoop_lib_home = "/usr/lib/hadoop/lib"
   hadoop_bin = "/usr/lib/hadoop/sbin"
   hadoop_home = '/usr'
 
+hadoop_conf_dir = "/etc/hadoop/conf"
 #security params
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py

@@ -30,7 +30,7 @@ def flume(action = None):
     for n in find_expected_agent_names():
       os.unlink(os.path.join(params.flume_conf_dir, n, 'ambari-meta.json'))
 
-    Directory(params.flume_conf_dir)
+    Directory(params.flume_conf_dir, recursive=True)
     Directory(params.flume_log_dir, owner=params.flume_user)
 
     flume_agents = {}

+ 2 - 4
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py

@@ -31,13 +31,11 @@ rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  flume_conf_dir = format('/usr/hdp/{rpm_version}/etc/flume/conf')
-  flume_bin = format('/usr/hdp/{rpm_version}/flume/bin/flume-ng')
-
+  flume_bin = '/usr/hdp/current/flume/bin/flume-ng'
 else:
-  flume_conf_dir = '/etc/flume/conf'
   flume_bin = '/usr/bin/flume-ng'
 
+flume_conf_dir = '/etc/flume/conf'
 java_home = config['hostLevelParams']['java_home']
 flume_log_dir = '/var/log/flume'
 flume_run_dir = '/var/run/flume'

+ 7 - 9
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py

@@ -36,22 +36,20 @@ if rpm_version is not None:
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
-  hbase_conf_dir = format('/usr/hdp/{rpm_version}/etc/hbase/conf')
-  daemon_script = format('/usr/hdp/{rpm_version}/hbase/bin/hbase-daemon.sh')
-  region_mover = format('/usr/hdp/{rpm_version}/hbase/bin/region_mover.rb')
-  region_drainer = format('/usr/hdp/{rpm_version}hbase/bin/draining_servers.rb')
-  hbase_cmd = format('/usr/hdp/{rpm_version}/hbase/bin/hbase')
+  hadoop_bin_dir = format("/usr/hdp/current/hadoop/bin")
+  daemon_script = format('/usr/hdp/current/hbase/bin/hbase-daemon.sh')
+  region_mover = format('/usr/hdp/current/hbase/bin/region_mover.rb')
+  region_drainer = format('/usr/hdp/currenthbase/bin/draining_servers.rb')
+  hbase_cmd = format('/usr/hdp/current/hbase/bin/hbase')
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
   hadoop_bin_dir = "/usr/bin"
-  hbase_conf_dir = "/etc/hbase/conf"
   daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
   region_mover = "/usr/lib/hbase/bin/region_mover.rb"
   region_drainer = "/usr/lib/hbase/bin/draining_servers.rb"
   hbase_cmd = "/usr/lib/hbase/bin/hbase"
 
+hadoop_conf_dir = "/etc/hadoop/conf"
+hbase_conf_dir = "/etc/hbase/conf"
 hbase_excluded_hosts = config['commandParams']['excluded_hosts']
 hbase_drain_only = config['commandParams']['mark_draining_only']
 hbase_included_hosts = config['commandParams']['included_hosts']

+ 8 - 10
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py

@@ -29,21 +29,19 @@ rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  hadoop_conf_empty_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf.empty")
-  mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
-  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
-  hadoop_bin = format("/usr/hdp/{rpm_version}/hadoop/sbin")
-  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
-  limits_conf_dir = format("/usr/hdp/{rpm_version}/etc/security/limits.d")
+  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
+  hadoop_bin = "/usr/hdp/current/hadoop/sbin"
+  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
-  hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
   mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
   hadoop_bin = "/usr/lib/hadoop/sbin"
   hadoop_bin_dir = "/usr/bin"
-  limits_conf_dir = "/etc/security/limits.d"
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+limits_conf_dir = "/etc/security/limits.d"
 
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
 ulimit_cmd = "ulimit -c unlimited; "

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py

@@ -33,7 +33,7 @@ class HdfsServiceCheck(Script):
 
     create_dir_cmd = format("fs -mkdir {dir}")
     chmod_command = format("fs -chmod 777 {dir}")
-    test_dir_exists = format("su - {smoke_user} -c 'hadoop --config {hadoop_conf_dir} fs -test -e {dir}'")
+    test_dir_exists = format("su - {smoke_user} -c '{hadoop_bin_dir}/hadoop --config {hadoop_conf_dir} fs -test -e {dir}'")
     cleanup_cmd = format("fs -rm {tmp_file}")
     #cleanup put below to handle retries; if retrying there wil be a stale file
     #that needs cleanup; exit code is fn of second command

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py

@@ -26,12 +26,14 @@ def hcat():
   import params
 
   Directory(params.hive_conf_dir,
+            recursive=True,
             owner=params.hcat_user,
             group=params.user_group,
   )
 
 
   Directory(params.hcat_conf_dir,
+            recursive=True,
             owner=params.hcat_user,
             group=params.user_group,
   )

+ 4 - 6
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py

@@ -44,8 +44,7 @@ def hcat_service_check():
             tries=3,
             user=params.smokeuser,
             try_sleep=5,
-            path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
-            environment = {'PATH' : params.execute_path},
+            path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin', params.execute_path],
             logoutput=True)
 
     if params.security_enabled:
@@ -57,7 +56,7 @@ def hcat_service_check():
                     kinit_path_local=params.kinit_path_local,
                     keytab=params.hdfs_user_keytab,
                     principal=params.hdfs_principal_name,
-                    bin_dir=params.hive_bin
+                    bin_dir=params.execute_path
       )
     else:
       ExecuteHadoop(test_cmd,
@@ -67,7 +66,7 @@ def hcat_service_check():
                     security_enabled=params.security_enabled,
                     kinit_path_local=params.kinit_path_local,
                     keytab=params.hdfs_user_keytab,
-                    bin_dir=params.hive_bin
+                    bin_dir=params.execute_path
       )
 
     cleanup_cmd = format("{kinit_cmd} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} cleanup")
@@ -75,8 +74,7 @@ def hcat_service_check():
     Execute(cleanup_cmd,
             tries=3,
             user=params.smokeuser,
-            environment = {'PATH' : params.execute_path },
             try_sleep=5,
-            path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+            path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin', params.execute_path],
             logoutput=True
     )

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py

@@ -72,6 +72,7 @@ def install_tez_jars():
                       dest_file=dest_file,
                       kinnit_if_needed=kinit_if_needed,
                       hdfs_user=params.hdfs_user,
+                      hadoop_bin_dir=params.hadoop_bin_dir,
                       hadoop_conf_dir=params.hadoop_conf_dir
         )
 
@@ -82,6 +83,7 @@ def install_tez_jars():
                     dest_dir=lib_dir_path,
                     kinnit_if_needed=kinit_if_needed,
                     hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
                     hadoop_conf_dir=params.hadoop_conf_dir
       )
     pass

+ 28 - 42
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py

@@ -33,56 +33,52 @@ hdp_stack_version = config['hostLevelParams']['stack_version']
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
-  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
-  hadoop_streeming_jars = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/hadoop-streaming-*.jar")
-  hive_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf')
-  hive_client_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf')
-  hive_server_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf.server')
-  hive_bin = format('/usr/hdp/{rpm_version}/hive/bin')
-  hive_lib = format('/usr/hdp/{rpm_version}/hive/lib')
-  tez_local_api_jars = format('/usr/hdp/{rpm_version}/tez/tez*.jar')
-  tez_local_lib_jars = format('/usr/hdp/{rpm_version}/tez/lib/*.jar')
-
-  if str(hdp_stack_version).startswith('2.0'):
-    hcat_conf_dir = format('/usr/hdp/{rpm_version}/etc/hcatalog/conf')
-    config_dir = format('/usr/hdp/{rpm_version}/etc/hcatalog/conf')
-    hcat_lib = format('/usr/hdp/{rpm_version}/hive/hcatalog/share/hcatalog')
-    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hcatalog/sbin')
-  # for newer versions
-  else:
-    hcat_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive-hcatalog/conf')
-    config_dir = format('/usr/hdp/{rpm_version}/etc/hive-webhcat/conf')
-    hcat_lib = format('/usr/hdp/{rpm_version}/hive/hive-hcatalog/share/hcatalog')
-    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hive-hcatalog/sbin')
+  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
+  hadoop_home = '/usr/hdp/current/hadoop'
+  hadoop_streeming_jars = "/usr/hdp/current/hadoop-mapreduce/hadoop-streaming-*.jar"
+  hive_bin = '/usr/hdp/current/hive/bin'
+  hive_lib = '/usr/hdp/current/hive/lib'
+  tez_local_api_jars = '/usr/hdp/current/tez/tez*.jar'
+  tez_local_lib_jars = '/usr/hdp/current/tez/lib/*.jar'
+  tez_tar_file = "/usr/hdp/current/tez/lib/tez*.tar.gz"
+
+  hcat_lib = '/usr/hdp/current/hive/hive-hcatalog/share/hcatalog'
+  webhcat_bin_dir = '/usr/hdp/current/hive/hive-hcatalog/sbin'
 
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
   hadoop_bin_dir = "/usr/bin"
   hadoop_home = '/usr'
   hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
-  hive_conf_dir = "/etc/hive/conf"
   hive_bin = '/usr/lib/hive/bin'
   hive_lib = '/usr/lib/hive/lib/'
-  hive_client_conf_dir = "/etc/hive/conf"
-  hive_server_conf_dir = '/etc/hive/conf.server'
   tez_local_api_jars = '/usr/lib/tez/tez*.jar'
   tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
+  tez_tar_file = "/usr/lib/tez/tez*.tar.gz"
 
   if str(hdp_stack_version).startswith('2.0'):
-    hcat_conf_dir = '/etc/hcatalog/conf'
-    config_dir = '/etc/hcatalog/conf'
     hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
     webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
   # for newer versions
   else:
-    hcat_conf_dir = '/etc/hive-hcatalog/conf'
-    config_dir = '/etc/hive-webhcat/conf'
     hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
     webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
 
-execute_path = os.environ['PATH'] + os.pathsep + hive_bin
+hadoop_conf_dir = "/etc/hadoop/conf"
+hive_conf_dir = "/etc/hive/conf"
+hive_client_conf_dir = "/etc/hive/conf"
+hive_server_conf_dir = '/etc/hive/conf.server'
+
+
+
+if str(hdp_stack_version).startswith('2.0'):
+  hcat_conf_dir = '/etc/hcatalog/conf'
+  config_dir = '/etc/hcatalog/conf'
+# for newer versions
+else:
+  hcat_conf_dir = '/etc/hive-hcatalog/conf'
+  config_dir = '/etc/hive-webhcat/conf'
+
+execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
 hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
 
@@ -224,8 +220,6 @@ if System.get_instance().os_family == "ubuntu":
 else:
   mysql_configname = '/etc/my.cnf'
 
-tez_tar_file = "/usr/lib/tez/tez*.tar.gz"
-
 # Hive security
 hive_authorization_enabled = config['configurations']['hive-site']['hive.security.authorization.enabled']
 
@@ -242,14 +236,6 @@ else:
 ########### WebHCat related params #####################
 ########################################################
 
-if str(config['hostLevelParams']['stack_version']).startswith('2.0'):
-  config_dir = '/etc/hcatalog/conf'
-  webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
-# for newer versions
-else:
-  config_dir = '/etc/hive-webhcat/conf'
-  webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
-
 webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
 templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
 templeton_pid_dir = status_params.hcat_pid_dir

+ 4 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py

@@ -56,6 +56,7 @@ def webhcat():
             recursive=True)
 
   Directory(params.config_dir,
+            recursive=True,
             owner=params.webhcat_user,
             group=params.user_group)
 
@@ -90,6 +91,7 @@ def webhcat():
                 dest_dir=params.webhcat_apps_dir,
                 kinnit_if_needed=kinit_if_needed,
                 hdfs_user=params.hdfs_user,
+                hadoop_bin_dir=params.hadoop_bin_dir,
                 hadoop_conf_dir=params.hadoop_conf_dir
   )
 
@@ -99,6 +101,7 @@ def webhcat():
                 dest_dir=params.webhcat_apps_dir,
                 kinnit_if_needed=kinit_if_needed,
                 hdfs_user=params.hdfs_user,
+                hadoop_bin_dir=params.hadoop_bin_dir,
                 hadoop_conf_dir=params.hadoop_conf_dir
   )
 
@@ -108,5 +111,6 @@ def webhcat():
                 dest_dir=params.webhcat_apps_dir,
                 kinnit_if_needed=kinit_if_needed,
                 hdfs_user=params.hdfs_user,
+                hadoop_bin_dir=params.hadoop_bin_dir,
                 hadoop_conf_dir=params.hadoop_conf_dir
   )

+ 5 - 7
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py

@@ -30,18 +30,16 @@ rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
-  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
-  pig_conf_dir = format('/usr/hdp/{rpm_version}/etc/pig/conf')
-  pig_bin_dir = format('/usr/hdp/{rpm_version}/pig/bin')
+  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
+  hadoop_home = '/usr/hdp/current/hadoop'
+  pig_bin_dir = '/usr/hdp/current/pig/bin'
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
   hadoop_bin_dir = "/usr/bin"
   hadoop_home = '/usr'
-  pig_conf_dir = "/etc/pig/conf"
   pig_bin_dir = ""
 
+hadoop_conf_dir = "/etc/hadoop/conf"
+pig_conf_dir = "/etc/pig/conf"
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 smokeuser = config['configurations']['cluster-env']['smokeuser']

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/pig.py

@@ -26,6 +26,7 @@ def pig():
   import params
 
   Directory( params.pig_conf_dir,
+    recursive = True,
     owner = params.hdfs_user,
     group = params.user_group
   )

+ 5 - 6
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py

@@ -26,18 +26,17 @@ rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  zoo_conf_dir = format('/usr/hdp/{rpm_version}/etc/zookeeper')
-  sqoop_conf_dir = format('/usr/hdp/{rpm_version}/sqoop/conf')
-  sqoop_lib = format('/usr/hdp/{rpm_version}/sqoop/lib')
-  hbase_home = format('/usr/hdp/{rpm_version}/hbase')
-  hive_home = format('/usr/hdp/{rpm_version}/hive')
+  sqoop_conf_dir = '/usr/hdp/current/etc/sqoop/conf'
+  sqoop_lib = '/usr/hdp/current/sqoop/lib'
+  hbase_home = '/usr/hdp/current/hbase'
+  hive_home = '/usr/hdp/current/hive'
 else:
-  zoo_conf_dir = "/etc/zookeeper"
   sqoop_conf_dir = "/usr/lib/sqoop/conf"
   sqoop_lib = "/usr/lib/sqoop/lib"
   hbase_home = "/usr"
   hive_home = "/usr"
 
+zoo_conf_dir = "/etc/zookeeper"
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 user_group = config['configurations']['cluster-env']['user_group']

+ 4 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/mapred_service_check.py

@@ -46,6 +46,7 @@ class MapReduce2ServiceCheck(Script):
                   tries=1,
                   try_sleep=5,
                   user=params.smokeuser,
+                  bin_dir=params.execute_path,
                   conf_dir=params.hadoop_conf_dir
     )
 
@@ -53,6 +54,7 @@ class MapReduce2ServiceCheck(Script):
                   tries=1,
                   try_sleep=5,
                   user=params.smokeuser,
+                  bin_dir=params.execute_path,
                   conf_dir=params.hadoop_conf_dir
     )
 
@@ -60,12 +62,14 @@ class MapReduce2ServiceCheck(Script):
                   tries=1,
                   try_sleep=5,
                   user=params.smokeuser,
+                  bin_dir=params.execute_path,
                   conf_dir=params.hadoop_conf_dir,
                   logoutput=True
     )
 
     ExecuteHadoop(test_cmd,
                   user=params.smokeuser,
+                  bin_dir=params.execute_path,
                   conf_dir=params.hadoop_conf_dir
     )
 

+ 11 - 13
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py

@@ -32,29 +32,27 @@ rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
-  hadoop_bin = format("/usr/hdp/{rpm_version}/hadoop/sbin")
-  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
-  limits_conf_dir = format("/usr/hdp/{rpm_version}/etc/security/limits.d")
-  hadoop_yarn_home = format('/usr/hdp/{rpm_version}/hadoop-yarn')
-  hadoop_mapred2_jar_location = format('/usr/hdp/{rpm_version}/hadoop-mapreduce')
-  mapred_bin = format('/usr/hdp/{rpm_version}/hadoop-mapreduce/sbin')
-  yarn_bin = format('/usr/hdp/{rpm_version}/hadoop-yarn/sbin')
-  yarn_container_bin = format('/usr/hdp/{rpm_version}/hadoop-yarn/bin')
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
+  hadoop_bin = "/usr/hdp/current/hadoop/sbin"
+  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
+  hadoop_yarn_home = '/usr/hdp/current/hadoop-yarn'
+  hadoop_mapred2_jar_location = '/usr/hdp/current/hadoop-mapreduce'
+  mapred_bin = '/usr/hdp/current/hadoop-mapreduce/sbin'
+  yarn_bin = '/usr/hdp/current/hadoop-yarn/sbin'
+  yarn_container_bin = '/usr/hdp/current/hadoop-yarn/bin'
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
   hadoop_bin = "/usr/lib/hadoop/sbin"
   hadoop_bin_dir = "/usr/bin"
-  limits_conf_dir = "/etc/security/limits.d"
   hadoop_yarn_home = '/usr/lib/hadoop-yarn'
   hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
   mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
   yarn_bin = "/usr/lib/hadoop-yarn/sbin"
   yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
 
-execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
+hadoop_conf_dir = "/etc/hadoop/conf"
+limits_conf_dir = "/etc/security/limits.d"
+execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir + os.pathsep + yarn_container_bin
 
 ulimit_cmd = "ulimit -c unlimited;"
 

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py

@@ -60,7 +60,7 @@ class ServiceCheck(Script):
     )
 
     Execute(run_yarn_check_cmd,
-            environment= {'PATH' : params.execute_path },
+            path=params.execute_path,
             user=params.smokeuser
     )
 

+ 3 - 4
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py

@@ -31,14 +31,13 @@ rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  config_dir = format('/usr/hdp/{rpm_version}/etc/zookeeper/conf')
-  zk_bin = format('/usr/hdp/{rpm_version}/zookeeper/bin')
-  smoke_script = format('/usr/hdp/{rpm_version}/zookeeper/bin/zkCli.sh')
+  zk_bin = '/usr/hdp/current/zookeeper/bin'
+  smoke_script = '/usr/hdp/current/zookeeper/bin/zkCli.sh'
 else:
-  config_dir = "/etc/zookeeper/conf"
   zk_bin = '/usr/lib/zookeeper/bin'
   smoke_script = "/usr/lib/zookeeper/bin/zkCli.sh"
 
+config_dir = "/etc/zookeeper/conf"
 zk_user =  config['configurations']['zookeeper-env']['zk_user']
 hostname = config['hostname']
 user_group = config['configurations']['cluster-env']['user_group']

+ 5 - 7
ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py

@@ -28,18 +28,16 @@ rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
-  falcon_webapp_dir = format("/usr/hdp/{rpm_version}/falcon/webapp")
-  falcon_home = format("/usr/hdp/{rpm_version}/falcon")
-  falcon_conf_dir = format("/usr/hdp/{rpm_version}/falcon/conf")
+  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
+  falcon_webapp_dir = "/usr/hdp/current/falcon/webapp"
+  falcon_home = "/usr/hdp/current/falcon"
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
   hadoop_bin_dir = "/usr/bin"
   falcon_webapp_dir = '/var/lib/falcon/webapp'
   falcon_home = '/usr/lib/falcon'
-  falcon_conf_dir = '/etc/falcon/conf'
 
+hadoop_conf_dir = "/etc/hadoop/conf"
+falcon_conf_dir = '/etc/falcon/conf'
 oozie_user = config['configurations']['oozie-env']['oozie_user']
 falcon_user = config['configurations']['falcon-env']['falcon_user']
 smoke_user =  config['configurations']['cluster-env']['smokeuser']

+ 10 - 1
ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py

@@ -24,6 +24,16 @@ import status_params
 # server configurations
 config = Script.get_config()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  rest_lib_dir = format('/usr/hdp/current/storm/contrib/storm-rest')
+
+else:
+  rest_lib_dir = "/usr/lib/storm/contrib/storm-rest"
+
 storm_user = config['configurations']['storm-env']['storm_user']
 log_dir = config['configurations']['storm-env']['storm_log_dir']
 pid_dir = status_params.pid_dir
@@ -37,7 +47,6 @@ nimbus_host = config['configurations']['storm-site']['nimbus.host']
 rest_api_port = "8745"
 rest_api_admin_port = "8746"
 rest_api_conf_file = format("{conf_dir}/config.yaml")
-rest_lib_dir = default("/configurations/storm-env/rest_lib_dir","/usr/lib/storm/contrib/storm-rest")
 storm_env_sh_template = config['configurations']['storm-env']['content']
 
 if 'ganglia_server_host' in config['clusterHostInfo'] and \

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml

@@ -27,7 +27,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>falcon_2_9_9_9_117</name>
+              <name>falcon_2_9_9_9_*</name>
             </package>
           </packages>
         </osSpecific>

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml

@@ -28,7 +28,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>flume_2_9_9_9_117</name>
+              <name>flume_2_9_9_9_*</name>
             </package>
           </packages>
         </osSpecific>

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml

@@ -28,7 +28,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hbase_2_9_9_9_117</name>
+              <name>hbase_2_9_9_9_*</name>
             </package>
           </packages>
         </osSpecific>

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml

@@ -23,7 +23,7 @@
 <configuration>
   <property>
     <name>rpm_version</name>
-    <value>2.9.9.9-117</value>
+    <value>2.9.9.9</value>
     <description>Hadoop RPM version</description>
   </property>
 </configuration>

+ 0 - 34
ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml

@@ -1,34 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<!-- Put site-specific property overrides in this file. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>dfs.hosts.exclude</name>
-    <value>/usr/hdp/2.9.9.9-117/etc/hadoop/conf/dfs.exclude</value>
-    <description>Names a file that contains a list of hosts that are
-      not permitted to connect to the namenode.  The full pathname of the
-      file must be specified.  If the value is empty, no hosts are
-      excluded.</description>
-  </property>
-
-</configuration>

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml

@@ -28,7 +28,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hadoop_2_9_9_9_117</name>
+              <name>hadoop_2_9_9_9_*</name>
             </package>
             <package>
               <name>hadoop-lzo</name>
@@ -52,7 +52,7 @@
               <name>hadoop-lzo-native</name>
             </package>
             <package>
-              <name>hadoop_2_9_9_9_117-libhdfs</name>
+              <name>hadoop_2_9_9_9_*-libhdfs</name>
             </package>
             <package>
               <name>ambari-log4j</name>

+ 4 - 10
ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml

@@ -23,35 +23,29 @@ limitations under the License.
 
 <configuration supports_final="true">
 
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/usr/hdp/2.9.9.9-117/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
   <property>
     <name>templeton.jar</name>
-    <value>/usr/hdp/2.9.9.9-117/hcatalog/share/webhcat/svr/webhcat.jar</value>
+    <value>/usr/hdp/current/hcatalog/share/webhcat/svr/webhcat.jar</value>
     <description>The path to the Templeton jar file.</description>
   </property>
 
   <property>
     <name>templeton.libjars</name>
-    <value>/usr/hdp/2.9.9.9-117/zookeeper/zookeeper.jar</value>
+    <value>/usr/hdp/current/zookeeper/zookeeper.jar</value>
     <description>Jars to add the the classpath.</description>
   </property>
 
 
   <property>
     <name>templeton.hadoop</name>
-    <value>/usr/hdp/2.9.9.9-117/hadoop/bin/hadoop</value>
+    <value>/usr/hdp/current/hadoop/bin/hadoop</value>
     <description>The path to the Hadoop executable.</description>
   </property>
 
 
   <property>
     <name>templeton.hcat</name>
-    <value>/usr/hdp/2.9.9.9-117/hive/bin/hcat</value>
+    <value>/usr/hdp/current/hive/bin/hcat</value>
     <description>The path to the hcatalog executable.</description>
   </property>
 

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml

@@ -26,16 +26,16 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hive_2_9_9_9_117</name>
+              <name>hive_2_9_9_9_*</name>
             </package>
             <package>
               <name>mysql-connector-java</name>
             </package>
             <package>
-              <name>hive_2_9_9_9_117-hcatalog</name>
+              <name>hive_2_9_9_9_*-hcatalog</name>
             </package>
             <package>
-              <name>hive_2_9_9_9_117-webhcat</name>
+              <name>hive_2_9_9_9_*-webhcat</name>
             </package>
             <package>
               <name>webhcat-tar-hive</name>

+ 0 - 13
ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml

@@ -19,19 +19,6 @@
 
 <configuration supports_final="true">
 
-  <property>
-    <name>oozie.service.HadoopAccessorService.hadoop.configurations</name>
-    <value>*=/usr/hdp/2.9.9.9-117/etc/hadoop/conf</value>
-    <description>
-      Comma separated AUTHORITY=HADOOP_CONF_DIR, where AUTHORITY is the HOST:PORT of
-      the Hadoop service (JobTracker, HDFS). The wildcard '*' configuration is
-      used when there is no exact match for an authority. The HADOOP_CONF_DIR contains
-      the relevant Hadoop *-site.xml files. If the path is relative is looked within
-      the Oozie configuration directory; though the path can be absolute (i.e. to point
-      to Hadoop client conf/ directories in the local filesystem.
-    </description>
-  </property>
-
   <property>
     <name>oozie.service.coord.check.maximum.frequency</name>
     <value>false</value>

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml

@@ -26,13 +26,13 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>oozie_2_9_9_9_117</name>
+              <name>oozie_2_9_9_9_*</name>
             </package>
             <package>
-              <name>oozie_2_9_9_9_117-client</name>
+              <name>oozie_2_9_9_9_*-client</name>
             </package>
             <package>
-              <name>falcon_2_9_9_9_117</name>
+              <name>falcon_2_9_9_9_*</name>
             </package>
             <package>
               <name>zip</name>

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml

@@ -27,7 +27,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>pig_2_9_9_9_117</name>
+              <name>pig_2_9_9_9_*</name>
             </package>
           </packages>
         </osSpecific>

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py

@@ -28,8 +28,8 @@ rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  slider_conf_dir = format('/usr/lib/{rpm_version}/slider/conf')
-  slider_bin_dir = format('/usr/lib/{rpm_version}/slider/bin')
+  slider_conf_dir = '/usr/lib/current/slider/conf'
+  slider_bin_dir = '/usr/lib/current/slider/bin'
 else:
   slider_conf_dir = "/usr/lib/slider/conf"
   slider_bin_dir = "/usr/lib/slider/bin"

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml

@@ -26,7 +26,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>sqoop_2_9_9_9_117</name>
+              <name>sqoop_2_9_9_9_*</name>
             </package>
             <package>
               <name>mysql-connector-java</name>

+ 0 - 29
ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-env.xml

@@ -1,29 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-  <property>
-    <name>rest_lib_dir</name>
-    <value>/usr/lib/storm/external/storm-rest</value>
-    <description></description>
-  </property>
-</configuration>

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml

@@ -25,13 +25,13 @@
 
   <property>
     <name>nimbus.childopts</name>
-    <value>-Xmx1024m -Djava.security.auth.login.config=/etc/storm/conf/storm_jaas.conf -javaagent:/usr/lib/storm/external/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8649,wireformat31x=true,mode=multicast,config=/usr/lib/storm/external/storm-jmxetric/conf/jmxetric-conf.xml,process=Nimbus_JVM</value>
+    <value>-Xmx1024m -Djava.security.auth.login.config=/etc/storm/conf/storm_jaas.conf -javaagent:/usr/hdp/current/storm/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8649,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Nimbus_JVM</value>
     <description>This parameter is used by the storm-deploy project to configure the jvm options for the nimbus daemon.</description>
   </property>
 
   <property>
     <name>worker.childopts</name>
-    <value>-Xmx768m -javaagent:/usr/lib/storm/external/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/lib/storm/external/storm-jmxetric/conf/jmxetric-conf.xml,process=Worker_%ID%_JVM</value>
+    <value>-Xmx768m -javaagent:/usr/hdp/current/storm/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Worker_%ID%_JVM</value>
     <description>The jvm opts provided to workers launched by this supervisor. All \"%ID%\" substrings are replaced with an identifier for this worker.</description>
   </property>
 
@@ -45,7 +45,7 @@
 
   <property>
     <name>supervisor.childopts</name>
-    <value>-Xmx256m -Djava.security.auth.login.config=/etc/storm/conf/storm_jaas.conf -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=56431 -javaagent:/usr/lib/storm/external/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/lib/storm/external/storm-jmxetric/conf/jmxetric-conf.xml,process=Supervisor_JVM</value>
+    <value>-Xmx256m -Djava.security.auth.login.config=/etc/storm/conf/storm_jaas.conf -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=56431 -javaagent:/usr/hdp/current/storm/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Supervisor_JVM</value>
     <description>This parameter is used by the storm-deploy project to configure the jvm options for the supervisor daemon.</description>
   </property>
 

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml

@@ -28,7 +28,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>storm_2_9_9_9_117</name>
+              <name>storm_2_9_9_9_*</name>
             </package>
           </packages>
         </osSpecific>
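
Editor's note: the package names in the STORM (and the following TEZ, YARN, ZOOKEEPER) metainfo.xml files are relaxed from a pinned build number to a glob, so any build of that stack line satisfies the spec. As a rough stand-alone illustration of how such a wildcard matches (not Ambari or yum code; the candidate names below are invented):

# Illustration only: glob-style matching of versioned RPM names.
# Candidate package names here are made up for the example.
from fnmatch import fnmatch

pattern = "storm_2_9_9_9_*"
candidates = ["storm_2_9_9_9_117", "storm_2_9_9_9_201", "storm_2_1_0_0_563"]

for name in candidates:
    print(name, fnmatch(name, pattern))  # True, True, False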

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml

@@ -28,7 +28,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>tez_2_9_9_9_117</name>
+              <name>tez_2_9_9_9_*</name>
             </package>
           </packages>
         </osSpecific>

+ 0 - 43
ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml

@@ -1,43 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <version>0.14.0.2.9.9.9</version>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive_2_9_9_9_117-webhcat</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-    </service>
-  </services>
-</metainfo>

+ 10 - 1
ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml

@@ -24,7 +24,7 @@
 
   <property>
     <name>mapreduce.admin.user.env</name>
-    <value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/hdp/2.9.9.9-117/hadoop/lib/native/Linux-amd64-64</value>
+    <value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/hdp/current/hadoop/lib/native/Linux-amd64-64</value>
     <description>
       Additional execution environment entries for map and reduce task processes.
       This is not an additive property. You must preserve the original value if
@@ -32,5 +32,14 @@
     </description>
   </property>
 
+  <property>
+    <name>mapreduce.application.classpath</name>
+    <value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*,/usr/hdp/current/hadoop-mapreduce/,/usr/hdp/current/hadoop-mapreduce/lib,/usr/hdp/current/hadoop/</value>
+    <description>
+      CLASSPATH for MR applications. A comma-separated list of CLASSPATH
+      entries.
+    </description>
+  </property>
+
 
 </configuration>
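
Editor's note: the new mapreduce.application.classpath value is a comma-separated list that points at the /usr/hdp/current locations rather than a hard-coded version directory. A minimal sketch (not part of this patch) for checking which literal entries resolve on a host; the $HADOOP_MAPRED_HOME entries are left to Hadoop, which expands them at container launch:

# Hypothetical helper, for illustration only: split a classpath value and
# report which non-variable entries exist locally.
import os

classpath = ("$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,"
             "/usr/hdp/current/hadoop-mapreduce/,"
             "/usr/hdp/current/hadoop-mapreduce/lib,"
             "/usr/hdp/current/hadoop/")

for entry in classpath.split(","):
    if entry.startswith("$"):
        continue  # expanded by Hadoop at runtime, not resolvable here
    path = entry.rstrip("*")  # drop a trailing glob for a simple existence check
    print(path, "exists" if os.path.exists(path) else "missing")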

+ 3 - 8
ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml

@@ -22,14 +22,9 @@
 <configuration supports_final="true" xmlns:xi="http://www.w3.org/2001/XInclude">
 
   <property>
-    <name>yarn.resourcemanager.nodes.exclude-path</name>
-    <value>/usr/hdp/2.9.9.9-117/etc/hadoop/conf/yarn.exclude</value>
-    <description>
-      Names a file that contains a list of hosts that are
-      not permitted to connect to the resource manager.  The full pathname of the
-      file must be specified.  If the value is empty, no hosts are
-      excluded.
-    </description>
+    <name>yarn.application.classpath</name>
+    <value>/etc/hadoop/conf,/usr/hdp/current/hadoop/*,/usr/hdp/current/hadoop/lib/*,/usr/hdp/current/hadoop-hdfs/*,/usr/hdp/current/hadoop-hdfs/lib/*,/usr/hdp/current/hadoop-yarn/*,/usr/hdp/current/hadoop-yarn/lib/*,/usr/hdp/current/hadoop-mapreduce/*,/usr/hdp/current/hadoop-mapreduce/lib/*</value>
+    <description>Classpath for typical applications.</description>
   </property>
 
 </configuration>
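
Editor's note: yarn.application.classpath is likewise rewritten to go through /usr/hdp/current, which on an HDP 2.2 host is expected to be a set of symlinks into the versioned install directory (the exact target depends on the installed build). A minimal, illustrative resolution check with assumed paths:

# Illustration only: entries under /usr/hdp/current are assumed to be symlinks
# into a versioned directory such as /usr/hdp/<build>/hadoop.
import os

for entry in ("/usr/hdp/current/hadoop", "/usr/hdp/current/hadoop-yarn"):
    print(entry, "->", os.path.realpath(entry))  # follows the symlink if present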

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml

@@ -37,10 +37,10 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hadoop_2_9_9_9_117-yarn</name>
+              <name>hadoop_2_9_9_9_*-yarn</name>
             </package>
             <package>
-              <name>hadoop_2_9_9_9_117-mapreduce</name>
+              <name>hadoop_2_9_9_9_*-mapreduce</name>
             </package>
           </packages>
         </osSpecific>
@@ -56,7 +56,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hadoop_2_9_9_9_117-mapreduce</name>
+              <name>hadoop_2_9_9_9_*-mapreduce</name>
             </package>
           </packages>
         </osSpecific>

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml

@@ -28,7 +28,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>zookeeper_2_9_9_9_117</name>
+              <name>zookeeper_2_9_9_9_*</name>
             </package>
           </packages>
         </osSpecific>

+ 1 - 1
ambari-server/src/test/python/stacks/1.3.2/HDFS/test_service_check.py

@@ -57,7 +57,7 @@ class TestServiceCheck(RMFTestCase):
         conf_dir = '/etc/hadoop/conf',
         bin_dir = '/usr/bin',
         logoutput = True,
-        not_if = 'su - ambari-qa -c \'hadoop --config /etc/hadoop/conf fs -test -e /tmp\'',
+        not_if = 'su - ambari-qa -c \'/usr/bin/hadoop --config /etc/hadoop/conf fs -test -e /tmp\'',
         try_sleep = 3,
         tries = 5,
         user = 'ambari-qa',

+ 2 - 2
ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py

@@ -157,7 +157,7 @@ class TestFlumeHandler(RMFTestCase):
 
   def assert_configure_default(self):
 
-    self.assertResourceCalled('Directory', '/etc/flume/conf')
+    self.assertResourceCalled('Directory', '/etc/flume/conf', recursive=True)
 
     self.assertResourceCalled('Directory', '/var/log/flume', owner = 'flume')
 
@@ -180,7 +180,7 @@ class TestFlumeHandler(RMFTestCase):
 
   def assert_configure_many(self):
 
-    self.assertResourceCalled('Directory', '/etc/flume/conf')
+    self.assertResourceCalled('Directory', '/etc/flume/conf', recursive=True)
 
     self.assertResourceCalled('Directory', '/var/log/flume', owner = 'flume')
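
Editor's note: the Flume (and later HCat/Pig/WebHCat) tests now expect Directory resources with recursive=True, i.e. intermediate directories are created as needed. A small stand-alone sketch of that semantics, assumed to mirror os.makedirs behaviour rather than the resource_management implementation itself:

# Rough analogue of Directory(path, recursive=True): create parent directories
# as needed and tolerate an already-existing target. Illustration only.
import errno
import os

def ensure_directory(path):
    try:
        os.makedirs(path)  # creates intermediate directories, like recursive=True
    except OSError as err:
        if err.errno != errno.EEXIST:
            raise

ensure_directory("/tmp/example/flume/conf")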
 

+ 1 - 1
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py

@@ -56,7 +56,7 @@ class TestServiceCheck(RMFTestCase):
     self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp',
         conf_dir = '/etc/hadoop/conf',
         logoutput = True,
-        not_if = 'su - ambari-qa -c \'hadoop --config /etc/hadoop/conf fs -test -e /tmp\'',
+        not_if = 'su - ambari-qa -c \'/usr/bin/hadoop --config /etc/hadoop/conf fs -test -e /tmp\'',
         try_sleep = 3,
         tries = 5,
         bin_dir = '/usr/bin',

+ 4 - 0
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py

@@ -31,10 +31,12 @@ class TestHcatClient(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/hive/conf',
                               owner = 'hcat',
                               group = 'hadoop',
+                              recursive = True,
     )
     self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
       owner = 'hcat',
       group = 'hadoop',
+      recursive = True,
     )
     self.assertResourceCalled('Directory', '/var/run/webhcat',
       owner = 'hcat',
@@ -64,10 +66,12 @@ class TestHcatClient(RMFTestCase):
                          config_file="secured.json"
     )
     self.assertResourceCalled('Directory', '/etc/hive/conf',
+                              recursive = True,
                               owner = 'hcat',
                               group = 'hadoop',
     )
     self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
+      recursive = True,
       owner = 'hcat',
       group = 'hadoop',
     )

+ 4 - 5
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py

@@ -41,7 +41,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assert_configure_default()
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
         not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
-        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin",
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin",
                        'HADOOP_HOME' : '/usr'},
         user = 'hive',
     )
@@ -81,10 +81,9 @@ class TestHiveMetastore(RMFTestCase):
     )
 
     self.assert_configure_secured()
-    self.maxDiff = None
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
         not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
-        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin",
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin",
                        'HADOOP_HOME' : '/usr'},
         user = 'hive',
     )
@@ -202,7 +201,7 @@ class TestHiveMetastore(RMFTestCase):
     )
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
         creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
-        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
         path = ['/bin', '/usr/bin/'],
         not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
     )
@@ -329,7 +328,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
         creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
         path = ['/bin', '/usr/bin/'],
-        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
         not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
     )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',

+ 7 - 5
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py

@@ -85,6 +85,7 @@ class TestHiveServer(RMFTestCase):
                               dest_dir='/apps/tez/',
                               kinnit_if_needed='',
                               hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
                               hdfs_user='hdfs',
                               dest_file=None
     )
@@ -94,13 +95,14 @@ class TestHiveServer(RMFTestCase):
                               owner='tez',
                               dest_dir='/apps/tez/lib/',
                               kinnit_if_needed='',
+                              hadoop_bin_dir='/usr/bin',
                               hadoop_conf_dir='/etc/hadoop/conf',
                               hdfs_user='hdfs'
     )
 
     self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
-                              environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin",
+                              environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin",
                                              'HADOOP_HOME' : '/usr'},
                               user = 'hive'
     )
@@ -153,8 +155,8 @@ class TestHiveServer(RMFTestCase):
     self.assert_configure_secured()
     self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
-                              environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin",
-                                             'HADOOP_HOME' : '/usr'},
+                              environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin",
+                                             'HADOOP_HOME': '/usr'},
                               user = 'hive'
     )
 
@@ -309,7 +311,7 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
         creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
         path = ['/bin', '/usr/bin/'],
-        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
         not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
     )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
@@ -466,7 +468,7 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
         creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
         path = ['/bin', '/usr/bin/'],
-        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
         not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
     )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',

+ 7 - 10
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py

@@ -40,10 +40,9 @@ class TestServiceCheck(RMFTestCase):
     )
     self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/hcatSmoke.sh hcatsmoke prepare',
                         logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin', os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
                         tries = 3,
                         user = 'ambari-qa',
-                        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
                         try_sleep = 5,
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
@@ -52,15 +51,14 @@ class TestServiceCheck(RMFTestCase):
                         conf_dir = '/etc/hadoop/conf',
                         keytab=UnknownConfigurationMock(),
                         kinit_path_local='/usr/bin/kinit',
-                        bin_dir = '/usr/lib/hive/bin',
+                        bin_dir = os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin",
                         security_enabled=False
     )
     self.assertResourceCalled('Execute', ' /tmp/hcatSmoke.sh hcatsmoke cleanup',
                         logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin', os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
                         tries = 3,
                         user = 'ambari-qa',
-                        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
                         try_sleep = 5,
     )
     self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
@@ -87,12 +85,12 @@ class TestServiceCheck(RMFTestCase):
                         content = StaticFile('hcatSmoke.sh'),
                         mode = 0755,
     )
+    self.maxDiff = None
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/hcatSmoke.sh hcatsmoke prepare',
                         logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin', os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
                         tries = 3,
                         user = 'ambari-qa',
-                        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
                         try_sleep = 5,
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
@@ -102,15 +100,14 @@ class TestServiceCheck(RMFTestCase):
                         keytab='/etc/security/keytabs/hdfs.headless.keytab',
                         kinit_path_local='/usr/bin/kinit',
                         security_enabled=True,
-                        bin_dir = '/usr/lib/hive/bin',
+                        bin_dir = os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin",
                         principal='hdfs'
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa;  /tmp/hcatSmoke.sh hcatsmoke cleanup',
                         logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin', os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
                         tries = 3,
                         user = 'ambari-qa',
-                        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
                         try_sleep = 5,
     )
     self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',

+ 8 - 0
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py

@@ -142,6 +142,7 @@ class TestWebHCatServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
                               owner = 'hcat',
                               group = 'hadoop',
+                              recursive = True,
                               )
     self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
                               owner = 'hcat',
@@ -161,6 +162,7 @@ class TestWebHCatServer(RMFTestCase):
                               dest_dir='/apps/webhcat',
                               kinnit_if_needed='',
                               hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
                               hdfs_user='hdfs'
     )
     self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
@@ -169,6 +171,7 @@ class TestWebHCatServer(RMFTestCase):
                               dest_dir='/apps/webhcat',
                               kinnit_if_needed='',
                               hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
                               hdfs_user='hdfs'
     )
     self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
@@ -176,6 +179,7 @@ class TestWebHCatServer(RMFTestCase):
                               mode=0755,
                               dest_dir='/apps/webhcat',
                               kinnit_if_needed='',
+                              hadoop_bin_dir='/usr/bin',
                               hadoop_conf_dir='/etc/hadoop/conf',
                               hdfs_user='hdfs'
     )
@@ -227,6 +231,7 @@ class TestWebHCatServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
                               owner = 'hcat',
                               group = 'hadoop',
+                              recursive = True,
                               )
     self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
                               owner = 'hcat',
@@ -250,6 +255,7 @@ class TestWebHCatServer(RMFTestCase):
                               dest_dir='/apps/webhcat',
                               kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
                               hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
                               hdfs_user='hdfs'
     )
     self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
@@ -258,6 +264,7 @@ class TestWebHCatServer(RMFTestCase):
                               dest_dir='/apps/webhcat',
                               kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
                               hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
                               hdfs_user='hdfs'
     )
     self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
@@ -266,5 +273,6 @@ class TestWebHCatServer(RMFTestCase):
                               dest_dir='/apps/webhcat',
                               kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
                               hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
                               hdfs_user='hdfs'
     )

+ 2 - 0
ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py

@@ -30,6 +30,7 @@ class TestPigClient(RMFTestCase):
     )
 
     self.assertResourceCalled('Directory', '/etc/pig/conf',
+      recursive = True,
       owner = 'hdfs',
       group = 'hadoop'
     )
@@ -59,6 +60,7 @@ class TestPigClient(RMFTestCase):
     )
     
     self.assertResourceCalled('Directory', '/etc/pig/conf',
+      recursive = True,
       owner = 'hdfs',
       group = 'hadoop'
     )

+ 8 - 0
ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py

@@ -38,11 +38,13 @@ class TestServiceCheck(RMFTestCase):
                       try_sleep = 5,
                       tries = 1,
                       user = 'ambari-qa',
+                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
                       conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -put /etc/passwd /user/ambari-qa/mapredsmokeinput',
                       try_sleep = 5,
                       tries = 1,
+                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
                       user = 'ambari-qa',
                       conf_dir = '/etc/hadoop/conf',
     )
@@ -50,11 +52,13 @@ class TestServiceCheck(RMFTestCase):
                       logoutput = True,
                       try_sleep = 5,
                       tries = 1,
+                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
                       user = 'ambari-qa',
                       conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/mapredsmokeoutput',
                       user = 'ambari-qa',
+                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
                       conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
@@ -73,11 +77,13 @@ class TestServiceCheck(RMFTestCase):
                       try_sleep = 5,
                       tries = 1,
                       user = 'ambari-qa',
+                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
                       conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -put /etc/passwd /user/ambari-qa/mapredsmokeinput',
                       try_sleep = 5,
                       tries = 1,
+                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
                       user = 'ambari-qa',
                       conf_dir = '/etc/hadoop/conf',
     )
@@ -85,11 +91,13 @@ class TestServiceCheck(RMFTestCase):
                       logoutput = True,
                       try_sleep = 5,
                       tries = 1,
+                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
                       user = 'ambari-qa',
                       conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/mapredsmokeoutput',
                       user = 'ambari-qa',
+                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
                       conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()

+ 2 - 2
ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_service_check.py

@@ -43,7 +43,7 @@ class TestServiceCheck(RMFTestCase):
                           try_sleep = 5,
     )
     self.assertResourceCalled('Execute', 'yarn --config /etc/hadoop/conf node -list',
-                              environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/bin"},
+                              path = [os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin"],
                               user = 'ambari-qa',
     )
     self.assertNoMoreResources()
@@ -66,7 +66,7 @@ class TestServiceCheck(RMFTestCase):
                           try_sleep = 5,
     )
     self.assertResourceCalled('Execute', 'yarn --config /etc/hadoop/conf node -list',
-                          environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/bin"},
+                              path = [os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin"],
                           user = 'ambari-qa',
     )
     self.assertNoMoreResources()

+ 4 - 4
ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py

@@ -41,7 +41,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assert_configure_default()
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
-                              environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin",
+                              environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin",
                                              'HADOOP_HOME' : '/usr'},
                               user = 'hive'
     )
@@ -85,7 +85,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assert_configure_secured()
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
-                              environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin",
+                              environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin",
                                              'HADOOP_HOME' : '/usr'},
                               user = 'hive'
     )
@@ -180,7 +180,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
         creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
         path = ['/bin', '/usr/bin/'],
-        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
         not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
     )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
@@ -285,7 +285,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
         creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
         path = ['/bin', '/usr/bin/'],
-        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
+        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
         not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
     )
     self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',