
AMBARI-8517. Run hdfs as non-root in secured cluster (aonishuk)

Andrew Onishuk, 10 years ago
commit 9280bb1d37
22 changed files with 169 additions and 173 deletions
  1. +2 -2   ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
  2. +3 -1   ambari-agent/src/test/python/resource_management/TestExecuteResource.py
  3. +3 -3   ambari-agent/src/test/python/resource_management/TestGroupResource.py
  4. +10 -10 ambari-agent/src/test/python/resource_management/TestUserResource.py
  5. +23 -41 ambari-common/src/main/python/resource_management/core/shell.py
  6. +1 -1   ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
  7. +8 -3   ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/shared_initialization.py
  8. +10 -7  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/utils.py
  9. +1 -1   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
  10. +8 -3  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
  11. +10 -12 ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
  12. +9 -9   ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py
  13. +9 -9   ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py
  14. +1 -1   ambari-server/src/test/python/stacks/1.3.2/HDFS/test_service_check.py
  15. +8 -8   ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py
  16. +20 -20 ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
  17. +8 -8   ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
  18. +16 -16 ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
  19. +1 -1   ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
  20. +8 -8   ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
  21. +8 -8   ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
  22. +2 -1   ambari-server/src/test/python/stacks/utils/RMFTestCase.py

+ 2 - 2
ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py

@@ -38,7 +38,7 @@ class TestCopyFromLocal(TestCase):
      call_arg_list = execute_hadoop_mock.call_args_list
      self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
                       call_arg_list[0][0][0].command)
-      self.assertEquals({'not_if': "/usr/bin/sudo -Hi su - user1 -s /bin/bash -c 'export  PATH=/usr/bin ; {kinnit_if_needed} ; hadoop fs -ls {dest_path}'", 'user': 'user1', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'},
+      self.assertEquals({'not_if': "/usr/bin/sudo su user1 -l -s /bin/bash -c 'export  PATH=/usr/bin > /dev/null ; {kinnit_if_needed} ; hadoop fs -ls {dest_path}'", 'user': 'user1', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'},
                        call_arg_list[0][0][0].arguments)
      self.assertEquals('fs -chown user1 /apps/test//*.files', call_arg_list[1][0][0].command)
      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
@@ -59,7 +59,7 @@ class TestCopyFromLocal(TestCase):
      call_arg_list = execute_hadoop_mock.call_args_list
      self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
                       call_arg_list[0][0][0].command)
-      self.assertEquals({'not_if': "/usr/bin/sudo -Hi su - user1 -s /bin/bash -c 'export  PATH=/usr/bin ; {kinnit_if_needed} ; hadoop fs -ls {dest_path}'", 'user': 'user1', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'},
+      self.assertEquals({'not_if': "/usr/bin/sudo su user1 -l -s /bin/bash -c 'export  PATH=/usr/bin > /dev/null ; {kinnit_if_needed} ; hadoop fs -ls {dest_path}'", 'user': 'user1', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'},
                        call_arg_list[0][0][0].arguments)
      self.assertEquals('fs -chown user1:hdfs /apps/test//*.files', call_arg_list[1][0][0].command)
      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': '/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)

+ 3 - 1
ambari-agent/src/test/python/resource_management/TestExecuteResource.py

@@ -176,7 +176,9 @@ class TestExecuteResource(TestCase):
                                 environment={'JAVA_HOME': '/test/java/home',
                                              'PATH': "/bin"}
      )
-    expected_command = ['/usr/bin/sudo', '-Hi', 'su', 'test_user', '-', '-s', '/bin/bash', '-c', 'export  PATH=' + os.environ['PATH'] + ':/bin JAVA_HOME=/test/java/home ; echo "1"']
+      
+
+    expected_command = ['/bin/bash', '--login', '-c', '/usr/bin/sudo su test_user -l -s /bin/bash -c \'export  PATH=' + os.environ['PATH'] + ':/bin JAVA_HOME=/test/java/home > /dev/null ; echo "1"\'']
    self.assertEqual(popen_mock.call_args_list[0][0][0], expected_command)



+ 3 - 3
ambari-agent/src/test/python/resource_management/TestGroupResource.py

@@ -45,7 +45,7 @@ class TestGroupResource(TestCase):
    

    self.assertEqual(popen_mock.call_count, 1)
-    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -Hi groupadd -p secure hadoop"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
+    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -H groupadd -p secure hadoop"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
    getgrnam_mock.assert_called_with('hadoop')


@@ -66,7 +66,7 @@ class TestGroupResource(TestCase):
    

    self.assertEqual(popen_mock.call_count, 1)
-    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -Hi groupmod -p secure -g 2 mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
+    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -H groupmod -p secure -g 2 mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
    getgrnam_mock.assert_called_with('mapred')


@@ -90,7 +90,7 @@ class TestGroupResource(TestCase):
    except Fail:
      pass
    self.assertEqual(popen_mock.call_count, 1)
-    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -Hi groupmod -p secure -g 2 mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
+    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -H groupmod -p secure -g 2 mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
    getgrnam_mock.assert_called_with('mapred')



+ 10 - 10
ambari-agent/src/test/python/resource_management/TestUserResource.py

@@ -38,7 +38,7 @@ class TestUserResource(TestCase):
    with Environment('/') as env:
      user = User("mapred", action = "create", shell = "/bin/bash")

-    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -Hi useradd -m -s /bin/bash mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
+    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -H useradd -m -s /bin/bash mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
    self.assertEqual(popen_mock.call_count, 1)

  @patch.object(subprocess, "Popen")
@@ -52,7 +52,7 @@ class TestUserResource(TestCase):
    with Environment('/') as env:
      user = User("mapred", action = "create", shell = "/bin/bash")

-    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -Hi usermod -s /bin/bash mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
+    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -H usermod -s /bin/bash mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
    self.assertEqual(popen_mock.call_count, 1)

  @patch.object(subprocess, "Popen")
@@ -81,7 +81,7 @@ class TestUserResource(TestCase):
      user = User("mapred", action = "create", comment = "testComment", 
          shell = "/bin/bash")

-    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -Hi usermod -c testComment -s /bin/bash mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
+    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -H usermod -c testComment -s /bin/bash mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
    self.assertEqual(popen_mock.call_count, 1)

  @patch.object(subprocess, "Popen")
@@ -96,7 +96,7 @@ class TestUserResource(TestCase):
      user = User("mapred", action = "create", home = "/test/home", 
          shell = "/bin/bash")

-    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -Hi usermod -s /bin/bash -d /test/home mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
+    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -H usermod -s /bin/bash -d /test/home mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
    self.assertEqual(popen_mock.call_count, 1)

  @patch.object(subprocess, "Popen")
@@ -111,7 +111,7 @@ class TestUserResource(TestCase):
      user = User("mapred", action = "create", password = "secure", 
          shell = "/bin/bash")    

-    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -Hi usermod -s /bin/bash -p secure mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
+    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -H usermod -s /bin/bash -p secure mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
    self.assertEqual(popen_mock.call_count, 1)

  @patch.object(subprocess, "Popen")
@@ -125,7 +125,7 @@ class TestUserResource(TestCase):
    with Environment('/') as env:
      user = User("mapred", action = "create", shell = "/bin/sh")

-    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -Hi usermod -s /bin/sh mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
+    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -H usermod -s /bin/sh mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
    self.assertEqual(popen_mock.call_count, 1)

  @patch.object(subprocess, "Popen")
@@ -139,7 +139,7 @@ class TestUserResource(TestCase):
    with Environment('/') as env:
      user = User("mapred", action = "create", uid = "1", shell = "/bin/bash")

-    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -Hi usermod -s /bin/bash -u 1 mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
+    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -H usermod -s /bin/bash -u 1 mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
    self.assertEqual(popen_mock.call_count, 1)

  @patch.object(subprocess, "Popen")
@@ -153,7 +153,7 @@ class TestUserResource(TestCase):
    with Environment('/') as env:
      user = User("mapred", action = "create", gid = "1", shell = "/bin/bash")

-    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -Hi usermod -s /bin/bash -g 1 mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
+    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -H usermod -s /bin/bash -g 1 mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
    self.assertEqual(popen_mock.call_count, 1)

  @patch.object(subprocess, "Popen")
@@ -168,7 +168,7 @@ class TestUserResource(TestCase):
      user = User("mapred", action = "create", groups = ['1','2','3'], 
          shell = "/bin/bash")

-    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -Hi usermod -G 1,2,3 -s /bin/bash mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
+    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -H usermod -G 1,2,3 -s /bin/bash mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
    self.assertEqual(popen_mock.call_count, 1)

  @patch.object(subprocess, "Popen")
@@ -181,5 +181,5 @@ class TestUserResource(TestCase):
    with Environment('/') as env:
      user = User("mapred", action = "create")

-    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -Hi useradd -m mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
+    popen_mock.assert_called_with(['/bin/bash', '--login', '-c', "/usr/bin/sudo  -H useradd -m mapred"], shell=False, preexec_fn=None, stderr=-2, stdout=-1, env={}, cwd=None)
    self.assertEqual(popen_mock.call_count, 1)

+ 23 - 41
ambari-common/src/main/python/resource_management/core/shell.py

@@ -53,23 +53,8 @@ def _call(command, logoutput=False, throw_on_failure=True,
  
  @return: retrun_code, stdout
  """
-  # convert to string and escape
-  if isinstance(command, (list, tuple)):
-    command = string_cmd_from_args_list(command)
-  elif sudo:
-    # Since ambari user sudoer privileges may be restricted,
-    # without having /bin/bash permission.
-    # Running interpreted shell commands in scope of 'sudo' is not possible.
-    #   
-    # In that case while passing string,
-    # any bash symbols eventually added to command like && || ; < > | << >> would cause problems.
-    #
-    # In case of need to create more complicated commands with sudo use as_sudo(command) function.
-    err_msg = Logger.get_protected_text(("String command '%s' cannot be run as sudo. Please supply the command as a tuple of arguments") % (command))
-    raise Fail(err_msg)
-  

-  # append current PATH, to env['PATH'] and path
+  # Append current PATH to env['PATH'] and path
  if 'PATH' in env:
    env['PATH'] = os.pathsep.join([os.environ['PATH'], env['PATH']])
  if path:
@@ -77,21 +62,20 @@ def _call(command, logoutput=False, throw_on_failure=True,
      env['PATH'] = ''
    path = os.pathsep.join(path) if isinstance(path, (list, tuple)) else path
    env['PATH'] = os.pathsep.join([os.environ['PATH'], path])
-
-  # In case we will use sudo, we have to put all the environment inside the command, 
-  # since Popen environment gets reset within sudo.
-  environment_str = reduce(lambda str,x: '{0} {1}={2}'.format(str,x,quote_bash_args(env[x])), env,'')
-  command = command.replace(SUDO_ENVIRONMENT_PLACEHOLDER, environment_str, 1) # replace placeholder from as_sudo / as_user if present
-   
-  bash_run_command = command if not sudo else "/usr/bin/sudo {0} -Hi {1}".format(environment_str, command)
-  
-  if user:
-    # Outter environment gets reset within su. That's why we can't use environment passed to Popen.
-    su_export_command = "export {0} ; ".format(environment_str) if environment_str else ""
-    subprocess_command = ["/usr/bin/sudo","-Hi","su", user, "-", "-s", "/bin/bash", "-c", su_export_command + bash_run_command]
-  else:
-    subprocess_command = ["/bin/bash","--login","-c", bash_run_command]
    
+  # prepare command cmd
+  if sudo:
+    command = as_sudo(command, env=env)
+  elif user:
+    command = as_user(command, user, env=env)
+    
+  # convert to string and escape
+  if isinstance(command, (list, tuple)):
+    command = string_cmd_from_args_list(command)
+  # replace placeholder from as_sudo / as_user if present
+  command = command.replace(SUDO_ENVIRONMENT_PLACEHOLDER, get_environment_str(env), 1)
+  
+  subprocess_command = ["/bin/bash","--login","-c", command]
  proc = subprocess.Popen(subprocess_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                          cwd=cwd, env=env, shell=False,
                          preexec_fn=preexec_fn)
@@ -139,25 +123,23 @@ def as_sudo(command, env=SUDO_ENVIRONMENT_PLACEHOLDER):
    #   
    # In that case while passing string,
    # any bash symbols eventually added to command like && || ; < > | << >> would cause problems.
-    err_msg = Logger.get_protected_text(("String command '%s' cannot be run as sudo. Please supply the command as a tuple/list of arguments") % (command))
+    err_msg = Logger.get_protected_text(("String command '%s' cannot be run as sudo. Please supply the command as a tuple of arguments") % (command))
    raise Fail(err_msg)
  
-  if env != SUDO_ENVIRONMENT_PLACEHOLDER:
-    env = reduce(lambda str,x: '{0} {1}={2}'.format(str,x,quote_bash_args(env[x])), env, '')
-  
-  return "/usr/bin/sudo {0} -Hi {1}".format(env, command)
+  env = get_environment_str(env) if env != SUDO_ENVIRONMENT_PLACEHOLDER else SUDO_ENVIRONMENT_PLACEHOLDER
+  return "/usr/bin/sudo {0} -H {1}".format(env, command)

def as_user(command, user , env=SUDO_ENVIRONMENT_PLACEHOLDER):
  if isinstance(command, (list, tuple)):
    command = string_cmd_from_args_list(command)
    
-  if env != SUDO_ENVIRONMENT_PLACEHOLDER:
-    env = reduce(lambda str,x: '{0} {1}={2}'.format(str,x,quote_bash_args(env[x])), env, '')
-    
-  export_command = "export {0} ; ".format(env)
+  env = get_environment_str(env) if env != SUDO_ENVIRONMENT_PLACEHOLDER else SUDO_ENVIRONMENT_PLACEHOLDER
+  export_command = "export {0} > /dev/null ; ".format(env)
  
-  result_command = "/usr/bin/sudo -Hi su - {0} -s /bin/bash -c {1}".format(user, quote_bash_args(export_command + command))
-  return result_command
+  return "/usr/bin/sudo su {0} -l -s /bin/bash -c {1}".format(user, quote_bash_args(export_command + command))
+
+def get_environment_str(env):
+  return reduce(lambda str,x: '{0} {1}={2}'.format(str,x,quote_bash_args(env[x])), env, '')

def string_cmd_from_args_list(command):
  return ' '.join(quote_bash_args(x) for x in command)

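For reference, the refactored helpers above compose into exactly the strings asserted by the updated tests. A minimal, runnable sketch, under these assumptions: quote_bash_args is approximated with pipes.quote (the real implementation lives in resource_management.core.utils), the placeholder literal {ENV_PLACEHOLDER} is inferred from the test expectations, and Fail is replaced by ValueError to keep the sketch dependency-free.

from functools import reduce  # builtin in Python 2; imported for portability
import pipes

SUDO_ENVIRONMENT_PLACEHOLDER = "{ENV_PLACEHOLDER}"  # assumed literal, per the test strings

def quote_bash_args(arg):
    # simplified stand-in for resource_management.core.utils.quote_bash_args
    return pipes.quote(arg)

def string_cmd_from_args_list(command):
    return ' '.join(quote_bash_args(x) for x in command)

def get_environment_str(env):
    return reduce(lambda s, x: '{0} {1}={2}'.format(s, x, quote_bash_args(env[x])), env, '')

def as_sudo(command, env=SUDO_ENVIRONMENT_PLACEHOLDER):
    # strings are rejected, as in the diff: restricted sudoers entries match argv,
    # so interpreted shell strings cannot be run through sudo
    if not isinstance(command, (list, tuple)):
        raise ValueError("String command '%s' cannot be run as sudo. "
                         "Please supply the command as a tuple of arguments" % command)
    command = string_cmd_from_args_list(command)
    env = get_environment_str(env) if env != SUDO_ENVIRONMENT_PLACEHOLDER else SUDO_ENVIRONMENT_PLACEHOLDER
    return "/usr/bin/sudo {0} -H {1}".format(env, command)

def as_user(command, user, env=SUDO_ENVIRONMENT_PLACEHOLDER):
    if isinstance(command, (list, tuple)):
        command = string_cmd_from_args_list(command)
    env = get_environment_str(env) if env != SUDO_ENVIRONMENT_PLACEHOLDER else SUDO_ENVIRONMENT_PLACEHOLDER
    # environment is exported inside the su shell, since su resets it;
    # 'sudo su <user> -l' replaces the old 'sudo -Hi su - <user>' form
    export_command = "export {0} > /dev/null ; ".format(env)
    return "/usr/bin/sudo su {0} -l -s /bin/bash -c {1}".format(user, quote_bash_args(export_command + command))

print(as_user("hadoop fs -test -e /tmp", "ambari-qa"))
# -> /usr/bin/sudo su ambari-qa -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; hadoop fs -test -e /tmp'

The last line reproduces the not_if string asserted in test_service_check.py further down.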
+ 1 - 1
ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py

@@ -104,7 +104,7 @@ class HdfsDirectoryProvider(Provider):
                   chown_cmd=' && '.join(chown_commands)),
            user=hdp_hdfs_user,
            path=bin_dir,
-            not_if=as_user("hadoop --config {hdp_conf_dir} fs -ls {dir_list_str}", hdp_hdfs_user)
+            not_if=as_user(format("hadoop --config {hdp_conf_dir} fs -ls {dir_list_str}"), hdp_hdfs_user)
    )

    directories_list[:] = []

+ 8 - 3
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/shared_initialization.py

@@ -50,11 +50,16 @@ def setup_jce():
  
  if params.security_enabled:
    security_dir = format("{java_home}/jre/lib/security")
-    extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}")
+    
+    File([format("{security_dir}/US_export_policy.jar"), format("{security_dir}/local_policy.jar")],
+         action = "delete",
+    )
+    
+    extract_cmd = ("unzip", "-o", "-j", "-q", jce_curl_target, "-d", security_dir) 
    Execute(extract_cmd,
            only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
-            cwd  = security_dir,
-            path = ['/bin/','/usr/bin']
+            path = ['/bin/','/usr/bin'],
+            sudo = True
    )
    


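The same setup_jce rework appears in the 2.0.6 hook below: the old in-shell rm/unzip string is split into a File delete plus a tuple-form Execute, because as_sudo refuses interpreted strings. Continuing the sketch above, with hypothetical paths for illustration (jce_curl_target and the JDK location are not taken from this diff), sudo=True presumably routes the tuple through as_sudo() inside shell._call:

# hypothetical values, for illustration only
jce_curl_target = "/var/lib/ambari-agent/tmp/jce_policy.zip"
security_dir = "/usr/jdk64/jdk1.7.0_45/jre/lib/security"

extract_cmd = ("unzip", "-o", "-j", "-q", jce_curl_target, "-d", security_dir)
print(as_sudo(extract_cmd))  # as_sudo from the sketch after the shell.py section
# -> /usr/bin/sudo {ENV_PLACEHOLDER} -H unzip -o -j -q /var/lib/ambari-agent/tmp/jce_policy.zip -d /usr/jdk64/jdk1.7.0_45/jre/lib/security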
+ 10 - 7
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/utils.py

@@ -30,10 +30,8 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
  check_process = format(
    "ls {pid_file} >/dev/null 2>&1 &&"
    " ps -p `cat {pid_file}` >/dev/null 2>&1")
-  hadoop_daemon = format(
-    "export HADOOP_LIBEXEC_DIR={hadoop_libexec_dir} && "
-    "{hadoop_bin}/hadoop-daemon.sh")
-  cmd = format("{hadoop_daemon} --config {hadoop_conf_dir}")
+  hadoop_daemon = format("{hadoop_bin}/hadoop-daemon.sh")
+  hadoop_env_exports = {'HADOOP_LIBEXEC_DIR': params.hadoop_libexec_dir}

  if create_pid_dir:
    Directory(pid_dir,
@@ -49,7 +47,12 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
      pid_file = format(
        "{hadoop_pid_dir_prefix}/{hdfs_user}/hadoop-{hdfs_user}-{name}.pid")

-  daemon_cmd = format("{ulimit_cmd} {cmd} {action} {name}")
+  if user == "root":
+    cmd = [hadoop_daemon, "--config", params.hadoop_conf_dir]
+    daemon_cmd = as_sudo(cmd + [action, name])
+  else:
+    cmd = format("{hadoop_daemon} --config {hadoop_conf_dir}")
+    daemon_cmd = as_user(format("{ulimit_cmd} {cmd} {action} {name}"), user)

  service_is_up = check_process if action == "start" else None
  #remove pid file from dead process
@@ -59,8 +62,8 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
       
  )
  Execute(daemon_cmd,
-          user=user,
-          not_if=service_is_up
+          not_if=service_is_up,
+          environment=hadoop_env_exports,
  )
  if action == "stop":
    File(pid_file,

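Both utils.py variants (1.3.2 here, 2.0.6 below) now branch on the user: when the daemon must run as root, the command is wrapped with as_sudo from an argument list; otherwise it is wrapped with as_user, and HADOOP_LIBEXEC_DIR travels via the Execute environment instead of an inline export. A sketch of the resulting strings, continuing the helpers sketched after the shell.py section, with values taken from the 1.3.2 test expectations:

# values mirror the 1.3.2 test expectations
hadoop_daemon = "/usr/lib/hadoop/bin/hadoop-daemon.sh"
hadoop_conf_dir = "/etc/hadoop/conf"
ulimit_cmd = "ulimit -c unlimited && "
action, name = "start", "datanode"

for user in ("root", "hdfs"):
    if user == "root":
        daemon_cmd = as_sudo([hadoop_daemon, "--config", hadoop_conf_dir, action, name])
    else:
        cmd = "{0} --config {1}".format(hadoop_daemon, hadoop_conf_dir)
        daemon_cmd = as_user("{0} {1} {2} {3}".format(ulimit_cmd, cmd, action, name), user)
    print(daemon_cmd)
# -> /usr/bin/sudo {ENV_PLACEHOLDER} -H /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode
# -> /usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode'

These two strings are exactly what the updated test_datanode.py assertions below expect for the root and hdfs cases.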
+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py

@@ -22,7 +22,7 @@ from resource_management import *
def setup_hdp_install_directory():
  import params
  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
-    Execute(format('ambari-python-wrap /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^{hdp_stack_version} | tail -1`'),
+    Execute(format('sudo /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^{hdp_stack_version} | tail -1`'),
            only_if=format('ls -d /usr/hdp/{hdp_stack_version}*')
    )


+ 8 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py

@@ -50,11 +50,16 @@ def setup_jce():
  
  if params.security_enabled:
    security_dir = format("{java_home}/jre/lib/security")
-    extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}")
+    
+    File([format("{security_dir}/US_export_policy.jar"), format("{security_dir}/local_policy.jar")],
+         action = "delete",
+    )
+    
+    extract_cmd = ("unzip", "-o", "-j", "-q", jce_curl_target, "-d", security_dir) 
    Execute(extract_cmd,
            only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
-            cwd  = security_dir,
-            path = ['/bin/','/usr/bin']
+            path = ['/bin/','/usr/bin'],
+            sudo = True
    )

def setup_users():

+ 10 - 12
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py

@@ -77,26 +77,24 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
          pass


-  hadoop_env_exports_str = ''
-  for exp in hadoop_env_exports.items():
-    hadoop_env_exports_str += "export {0}={1} && ".format(exp[0], exp[1])
-
-  hadoop_daemon = format(
-    "{hadoop_env_exports_str}"
-    "{hadoop_bin}/hadoop-daemon.sh")
-  cmd = format("{hadoop_daemon} --config {hadoop_conf_dir}")
-
-  daemon_cmd = format("{ulimit_cmd} {cmd} {action} {name}")
+  hadoop_daemon = format("{hadoop_bin}/hadoop-daemon.sh")

+  if user == "root":
+    cmd = [hadoop_daemon, "--config", params.hadoop_conf_dir]
+    daemon_cmd = as_sudo(cmd + [action, name])
+  else:
+    cmd = format("{hadoop_daemon} --config {hadoop_conf_dir}")
+    daemon_cmd = as_user(format("{ulimit_cmd} {cmd} {action} {name}"), user)
+     
  service_is_up = check_process if action == "start" else None
  #remove pid file from dead process
  File(pid_file,
       action="delete",
-       not_if=check_process,
+       not_if=check_process
  )
  Execute(daemon_cmd,
          not_if=service_is_up,
-          user=user
+          environment=hadoop_env_exports
  )

  #After performing the desired action, perform additional tasks.

+ 9 - 9
ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py

@@ -49,9 +49,9 @@ class TestDatanode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertNoMoreResources()

@@ -73,9 +73,9 @@ class TestDatanode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                              action = ['delete'],
@@ -110,9 +110,9 @@ class TestDatanode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
-                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
-                              user = 'root',
+    self.assertResourceCalled('Execute', '/usr/bin/sudo {ENV_PLACEHOLDER} -H /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
+        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
    )
    self.assertNoMoreResources()

@@ -134,9 +134,9 @@ class TestDatanode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+    self.assertResourceCalled('Execute', '/usr/bin/sudo {ENV_PLACEHOLDER} -H /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'root',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                              action = ['delete'],

+ 9 - 9
ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py

@@ -62,9 +62,9 @@ class TestNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
-                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
-                              user = 'hdfs',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
+        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
    )
    self.assertResourceCalled('Execute', "hadoop dfsadmin -safemode get | grep 'Safe mode is OFF'",
                              tries = 40,
@@ -111,9 +111,9 @@ class TestNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
                              action = ['delete'],
@@ -159,9 +159,9 @@ class TestNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
                              user = 'hdfs',
@@ -211,9 +211,9 @@ class TestNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
                              action = ['delete'],

+ 1 - 1
ambari-server/src/test/python/stacks/1.3.2/HDFS/test_service_check.py

@@ -54,7 +54,7 @@ class TestServiceCheck(RMFTestCase):
    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp ; hadoop fs -chmod 777 /tmp',
                              conf_dir = '/etc/hadoop/conf',
                              logoutput = True,
-                              not_if = "/usr/bin/sudo -Hi su - ambari-qa -s /bin/bash -c 'export {ENV_PLACEHOLDER} ; hadoop fs -test -e /tmp'",
+                              not_if = "/usr/bin/sudo su ambari-qa -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; hadoop fs -test -e /tmp'",
                              try_sleep = 3,
                              tries = 5,
                              user = 'ambari-qa',

+ 8 - 8
ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py

@@ -61,9 +61,9 @@ class TestSNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertNoMoreResources()

@@ -85,9 +85,9 @@ class TestSNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
                              action = ['delete'],
@@ -132,9 +132,9 @@ class TestSNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertNoMoreResources()

@@ -156,9 +156,9 @@ class TestSNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
                              action = ['delete'],

+ 20 - 20
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py

@@ -57,9 +57,9 @@ class TestDatanode(RMFTestCase):
                               action = ['delete'],
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               )
                               )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
         not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
         not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
-        user = 'hdfs',
     )
     )
     self.assertNoMoreResources()
     self.assertNoMoreResources()
 
 
@@ -87,9 +87,9 @@ class TestDatanode(RMFTestCase):
                               action = ['delete'],
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               )
                               )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
         not_if = None,
         not_if = None,
-        user = 'hdfs',
     )
     )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               action = ['delete'],
@@ -129,14 +129,14 @@ class TestDatanode(RMFTestCase):
                               action = ['delete'],
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               )
                               )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
+    self.assertResourceCalled('Execute', '/usr/bin/sudo {ENV_PLACEHOLDER} -H /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
         not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
         not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
-        user = 'root',
     )
     )
     self.assertNoMoreResources()
     self.assertNoMoreResources()
 
 
   def test_start_secured_HDP22_root(self):
   def test_start_secured_HDP22_root(self):
-    config_file = "stacks/2.0.6/configs/secured.json"
+    config_file = self._getSrcFolder()+"/test/python/stacks/2.0.6/configs/secured.json"
     with open(config_file, "r") as f:
     with open(config_file, "r") as f:
       secured_json = json.load(f)
       secured_json = json.load(f)
 
 
@@ -165,14 +165,14 @@ class TestDatanode(RMFTestCase):
                               action = ['delete'],
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               )
                               )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/hdp/current/hadoop-client/libexec && /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
+    self.assertResourceCalled('Execute', '/usr/bin/sudo {ENV_PLACEHOLDER} -H /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
         not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
         not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
-        user = 'root',
     )
     )
     self.assertNoMoreResources()
     self.assertNoMoreResources()
 
 
   def test_start_secured_HDP22_non_root_https_only(self):
   def test_start_secured_HDP22_non_root_https_only(self):
-    config_file = "stacks/2.0.6/configs/secured.json"
+    config_file = self._getSrcFolder()+"/test/python/stacks/2.0.6/configs/secured.json"
     with open(config_file, "r") as f:
     with open(config_file, "r") as f:
       secured_json = json.load(f)
       secured_json = json.load(f)
 
 
@@ -204,9 +204,9 @@ class TestDatanode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/hdp/current/hadoop-client/libexec && /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertNoMoreResources()

@@ -234,9 +234,9 @@ class TestDatanode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+    self.assertResourceCalled('Execute', '/usr/bin/sudo {ENV_PLACEHOLDER} -H /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'root',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                              action = ['delete'],
@@ -246,7 +246,7 @@ class TestDatanode(RMFTestCase):
 
 
   @patch("os.path.exists", new = MagicMock(return_value=False))
   @patch("os.path.exists", new = MagicMock(return_value=False))
   def test_stop_secured_HDP22_root(self):
   def test_stop_secured_HDP22_root(self):
-    config_file = "stacks/2.0.6/configs/secured.json"
+    config_file = self._getSrcFolder()+"/test/python/stacks/2.0.6/configs/secured.json"
     with open(config_file, "r") as f:
     with open(config_file, "r") as f:
       secured_json = json.load(f)
       secured_json = json.load(f)
 
 
@@ -274,9 +274,9 @@ class TestDatanode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/hdp/current/hadoop-client/libexec && /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+    self.assertResourceCalled('Execute', '/usr/bin/sudo {ENV_PLACEHOLDER} -H /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
        not_if = None,
-        user = 'root',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                              action = ['delete'],
@@ -285,7 +285,7 @@ class TestDatanode(RMFTestCase):
 
 
   @patch("os.path.exists", new = MagicMock(return_value=False))
   @patch("os.path.exists", new = MagicMock(return_value=False))
   def test_stop_secured_HDP22_non_root_https_only(self):
   def test_stop_secured_HDP22_non_root_https_only(self):
-    config_file = "stacks/2.0.6/configs/secured.json"
+    config_file = self._getSrcFolder()+"/test/python/stacks/2.0.6/configs/secured.json"
     with open(config_file, "r") as f:
     with open(config_file, "r") as f:
       secured_json = json.load(f)
       secured_json = json.load(f)
 
 
@@ -316,9 +316,9 @@ class TestDatanode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/hdp/current/hadoop-client/libexec && /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
        not_if = None,
-        user = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                              action=['delete'],
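The datanode hunks above illustrate the two command shapes this patch introduces: when the daemon must run as root (secured cluster, privileged ports), the script is invoked through plain sudo, with the exported variables inlined and -H resetting $HOME; when it can run as hdfs, the whole command is wrapped in a login shell via su. A minimal sketch that reproduces both expected strings, using a hypothetical daemon_cmd helper (illustrative only, not the project's actual API):

    # Hypothetical sketch. {ENV_PLACEHOLDER} is the token the test harness
    # later substitutes with the real exports (e.g. HADOOP_LIBEXEC_DIR=...).
    def daemon_cmd(cmd, run_as_root, user='hdfs', env='{ENV_PLACEHOLDER}'):
        if run_as_root:
            # Root path: env assignments passed straight to sudo; no ulimit.
            return '/usr/bin/sudo %s -H %s' % (env, cmd)
        # Non-root path: login shell via su, export output silenced.
        return ("/usr/bin/sudo su %s -l -s /bin/bash -c "
                "'export %s > /dev/null ; ulimit -c unlimited &&  %s'"
                % (user, env, cmd))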

+ 8 - 8
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py

@@ -56,9 +56,9 @@ class TestJournalnode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start journalnode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start journalnode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertNoMoreResources()

@@ -80,9 +80,9 @@ class TestJournalnode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop journalnode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop journalnode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid',
                              action = ['delete'],
@@ -122,9 +122,9 @@ class TestJournalnode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start journalnode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start journalnode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertNoMoreResources()

@@ -146,9 +146,9 @@ class TestJournalnode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop journalnode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop journalnode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid',
                              action = ['delete'],

+ 16 - 16
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py

@@ -78,9 +78,9 @@ class TestNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', 'hdfs --config /etc/hadoop/conf dfsadmin -safemode leave',
        path = ['/usr/bin'],
@@ -137,9 +137,9 @@ class TestNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
                              action = ['delete'],
@@ -195,9 +195,9 @@ class TestNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
                              user='hdfs',
@@ -257,9 +257,9 @@ class TestNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
                              action = ['delete'],
@@ -295,14 +295,14 @@ class TestNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', "hadoop dfsadmin -safemode get | grep 'Safe mode is OFF'",
                              path = ['/usr/bin'],
                              tries = 40,
-                              only_if = "/usr/bin/sudo -Hi su - hdfs -s /bin/bash -c 'export  PATH=/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/usr/bin > /dev/null ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
                              user = 'hdfs',
                              try_sleep = 10,
    )
@@ -336,7 +336,7 @@ class TestNamenode(RMFTestCase):
                              kinit_path_local = '/usr/bin/kinit',
                              action = ['create'],
                              bin_dir = '/usr/bin',
-                              only_if = "/usr/bin/sudo -Hi su - hdfs -s /bin/bash -c 'export  PATH=/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/usr/bin > /dev/null ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
                              )
    self.assertNoMoreResources()

@@ -369,9 +369,9 @@ class TestNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
        user = 'hdfs',
@@ -379,7 +379,7 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('Execute', "hadoop dfsadmin -safemode get | grep 'Safe mode is OFF'",
     self.assertResourceCalled('Execute', "hadoop dfsadmin -safemode get | grep 'Safe mode is OFF'",
         path = ['/usr/bin'],
         path = ['/usr/bin'],
         tries = 40,
         tries = 40,
-        only_if = "/usr/bin/sudo -Hi su - hdfs -s /bin/bash -c 'export  PATH=/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+        only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/usr/bin > /dev/null ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
         user = 'hdfs',
         user = 'hdfs',
         try_sleep = 10,
         try_sleep = 10,
     )
     )
@@ -413,7 +413,7 @@ class TestNamenode(RMFTestCase):
                              kinit_path_local = '/usr/bin/kinit',
                              action = ['create'],
                              bin_dir = '/usr/bin',
-                              only_if = "/usr/bin/sudo -Hi su - hdfs -s /bin/bash -c 'export  PATH=/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/usr/bin > /dev/null ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
                             )
    self.assertNoMoreResources()
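Alongside the daemon commands, the HA assertions above switch the only_if/not_if guards from the old `sudo -Hi su - hdfs` form to `sudo su hdfs -l`, with the export redirected to /dev/null so it cannot pollute the guard's stdout. A sketch that reproduces the expected guard string, again assuming a hypothetical helper rather than the project's real API:

    def as_user(command, user, env='PATH=/usr/bin'):
        # Hypothetical helper; note the two spaces after `export`, matching
        # the strings asserted above.
        return ("/usr/bin/sudo su %s -l -s /bin/bash -c "
                "'export  %s > /dev/null ; %s'" % (user, env, command))

    only_if = as_user('hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active', 'hdfs')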
 
 

+ 1 - 1
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py

@@ -55,7 +55,7 @@ class TestServiceCheck(RMFTestCase):
    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp',
        conf_dir = '/etc/hadoop/conf',
        logoutput = True,
-        not_if = "/usr/bin/sudo -Hi su - ambari-qa -s /bin/bash -c 'export {ENV_PLACEHOLDER} ; /usr/bin/hadoop --config /etc/hadoop/conf fs -test -e /tmp'",
+        not_if = "/usr/bin/sudo su ambari-qa -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; /usr/bin/hadoop --config /etc/hadoop/conf fs -test -e /tmp'",
        try_sleep = 3,
        tries = 5,
        bin_dir = '/usr/bin',

+ 8 - 8
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py

@@ -66,9 +66,9 @@ class TestSNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertNoMoreResources()

@@ -95,9 +95,9 @@ class TestSNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
                              action = ['delete'],
@@ -147,9 +147,9 @@ class TestSNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertNoMoreResources()

@@ -176,9 +176,9 @@ class TestSNamenode(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
                              action = ['delete'],

+ 8 - 8
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py

@@ -76,9 +76,9 @@ class TestZkfc(RMFTestCase):
                              action = ['delete'],
                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertNoMoreResources()

@@ -101,9 +101,9 @@ class TestZkfc(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
                              action = ['delete'],
@@ -163,9 +163,9 @@ class TestZkfc(RMFTestCase):
                              action = ['delete'],
                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
-        user = 'hdfs',
    )
    self.assertNoMoreResources()

@@ -187,9 +187,9 @@ class TestZkfc(RMFTestCase):
                              action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
                              )
-    self.assertResourceCalled('Execute', 'ulimit -c unlimited &&  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc',
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export {ENV_PLACEHOLDER} > /dev/null ; ulimit -c unlimited &&  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = None,
-        user = 'hdfs',
    )
    self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
                              action = ['delete'],

+ 2 - 1
ambari-server/src/test/python/stacks/utils/RMFTestCase.py

@@ -39,6 +39,7 @@ PATH_TO_STACK_TESTS = "test/python/stacks/"
 
 
 PATH_TO_CUSTOM_ACTIONS = "main/resources/custom_actions"
 PATH_TO_CUSTOM_ACTION_TESTS = "test/python/custom_actions"
+MAX_SHOWN_DICT_LEN = 10


 class RMFTestCase(TestCase):
@@ -134,7 +135,7 @@ class RMFTestCase(TestCase):
    return method
  
  def _ppformat(self, val):
-    if isinstance(val, dict):
+    if isinstance(val, dict) and len(val) > MAX_SHOWN_DICT_LEN:
      return "self.getConfig()['configurations']['?']"
    
    val = pprint.pformat(val)
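With the new length guard, only dicts larger than MAX_SHOWN_DICT_LEN entries are collapsed to the configuration placeholder when the harness prints an expected resource; small dicts, such as the environment argument these tests now assert on, are pretty-printed verbatim. A condensed, standalone illustration (the real method does more than shown here):

    import pprint

    MAX_SHOWN_DICT_LEN = 10

    def _ppformat(val):
        # Large config blobs are still elided behind a placeholder ...
        if isinstance(val, dict) and len(val) > MAX_SHOWN_DICT_LEN:
            return "self.getConfig()['configurations']['?']"
        # ... while small dicts now render in full.
        return pprint.pformat(val)

    print(_ppformat({'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'}))
    # -> {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'}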