
AMBARI-8689. Fix multiple issues with custom commands and actions on non-root (aonishuk)

Andrew Onishuk, 10 years ago
parent commit 0ebde089d0
23 changed files with 184 additions and 147 deletions
  1. +13 -7   ambari-agent/conf/unix/ambari-agent
  2. +18 -6   ambari-common/src/main/python/resource_management/core/shell.py
  3. +14 -13  ambari-common/src/main/python/resource_management/libraries/functions/check_process_status.py
  4. +2 -2    ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py
  5. +1 -1    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_service.py
  6. +5 -4    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
  7. +1 -1    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
  8. +5 -4    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/removeMysqlUser.sh
  9. +2 -2    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
 10. +4 -4    ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
 11. +4 -2    ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
 12. +7 -7    ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
 13. +6 -6    ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
 14. +24 -24  ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
 15. +2 -2    ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
 16. +3 -3    ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
 17. +47 -35  ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
 18. +8 -8    ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
 19. +2 -2    ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
 20. +2 -2    ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_service_check.py
 21. +2 -2    ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
 22. +3 -1    ambari-server/src/test/python/stacks/utils/RMFTestCase.py
 23. +9 -9    ambari-web/app/messages.js

+ 13 - 7
ambari-agent/conf/unix/ambari-agent

@@ -55,13 +55,14 @@ if [ "$?" != "0" ]; then
 fi
 
 current_user=`awk -v val=$EUID -F ":" '$3==val{print $1}' /etc/passwd`
-# setup necessary ownership
-sudo chown -R $current_user "/var/lib/ambari-agent/ambari-env.sh"
-sudo chown -R $current_user "/var/run/ambari-agent"
-sudo chown -R $current_user "/var/log/ambari-agent"
-sudo chown -R $current_user "/var/lib/ambari-agent/data"
-sudo chown -R $current_user "/var/lib/ambari-agent/cache"
-sudo chown 	  $current_user "/usr/lib/ambari-agent"
+
+change_files_permissions() {
+	sudo chown -R $current_user "/var/run/ambari-agent"
+	sudo chown -R $current_user "/var/log/ambari-agent"
+	sudo chown -R $current_user "/var/lib/ambari-agent/data"
+	sudo chown -R $current_user "/var/lib/ambari-agent/cache"
+	sudo chown 	  $current_user "/usr/lib/ambari-agent"
+}
 
 if [ -a /usr/bin/python2.7 ] && [ -z "$PYTHON" ]; then
   PYTHON=/usr/bin/python2.7
@@ -84,6 +85,7 @@ fi
 
 # Reading the environment file
 if [ -a /var/lib/ambari-agent/ambari-env.sh ]; then
+  sudo chown -R $current_user "/var/lib/ambari-agent/ambari-env.sh"
   . /var/lib/ambari-agent/ambari-env.sh
 fi
 
@@ -140,6 +142,8 @@ case "$1" in
             exit -1
           fi
         fi
+        change_files_permissions
+        
         echo "Starting ambari-agent"
         nohup $PYTHON $AMBARI_AGENT_PY_SCRIPT "$@" > $OUTFILE 2>&1 &
         sleep 2
@@ -201,6 +205,7 @@ case "$1" in
             tput sgr0
           else
             echo "Stopping $AMBARI_AGENT"
+            change_files_permissions
             $PYTHON $AGENT_SCRIPT stop
           fi
           echo "Removing PID file at $PIDFILE"
@@ -230,6 +235,7 @@ case "$1" in
             exit 1
           fi
           echo -e "Resetting $AMBARI_AGENT"
+          change_files_permissions
           $PYTHON $AGENT_SCRIPT reset $2
           retcode=$?
 

+ 18 - 6
ambari-common/src/main/python/resource_management/core/shell.py

@@ -48,7 +48,7 @@ def call(command, verbose=False, logoutput=False,
   @return: return_code, stdout
   """
   return _call(command, verbose, logoutput, False, cwd, env, preexec_fn, user, wait_for_finish, timeout, path, output_file, sudo)
-            
+
 def _call(command, verbose=False, logoutput=False, throw_on_failure=True,
          cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, timeout=None, path=None, output_file=None, sudo=False):
   """
@@ -62,9 +62,9 @@ def _call(command, verbose=False, logoutput=False, throw_on_failure=True,
   @return: return_code, stdout
   """
 
-  # Append current PATH to env['PATH'] and path
-  env = {} if not env else env
-  env['PATH'] = os.pathsep.join([os.environ['PATH'], env['PATH']]) if 'PATH' in env else os.environ['PATH']
+  # Append current PATH to env['PATH']
+  env = add_current_path_to_env(env)
+  # Append path to env['PATH']
   if path:
     path = os.pathsep.join(path) if isinstance(path, (list, tuple)) else path
     env['PATH'] = os.pathsep.join([env['PATH'], path])
@@ -135,18 +135,30 @@ def as_sudo(command, env=SUDO_ENVIRONMENT_PLACEHOLDER):
     err_msg = Logger.get_protected_text(("String command '%s' cannot be run as sudo. Please supply the command as a tuple of arguments") % (command))
     raise Fail(err_msg)
   
-  env = get_environment_str(env) if env != SUDO_ENVIRONMENT_PLACEHOLDER else SUDO_ENVIRONMENT_PLACEHOLDER
+  env = get_environment_str(add_current_path_to_env(env)) if env != SUDO_ENVIRONMENT_PLACEHOLDER else SUDO_ENVIRONMENT_PLACEHOLDER
   return "/usr/bin/sudo {0} -H -E {1}".format(env, command)
 
 def as_user(command, user , env=SUDO_ENVIRONMENT_PLACEHOLDER):
   if isinstance(command, (list, tuple)):
     command = string_cmd_from_args_list(command)
     
-  env = get_environment_str(env) if env != SUDO_ENVIRONMENT_PLACEHOLDER else SUDO_ENVIRONMENT_PLACEHOLDER
+  env = get_environment_str(add_current_path_to_env(env)) if env != SUDO_ENVIRONMENT_PLACEHOLDER else SUDO_ENVIRONMENT_PLACEHOLDER
   export_command = "export {0} > /dev/null ; ".format(env)
   
   return "/usr/bin/sudo su {0} -l -s /bin/bash -c {1}".format(user, quote_bash_args(export_command + command))
 
+def add_current_path_to_env(env):
+  result = {} if not env else env
+  
+  if not 'PATH' in result:
+    result['PATH'] = os.environ['PATH']
+    
+  # don't append current env if already there
+  if not set(os.environ['PATH'].split(os.pathsep)).issubset(result['PATH'].split(os.pathsep)):
+    result['PATH'] = os.pathsep.join([os.environ['PATH'], result['PATH']])
+  
+  return result
+  
 def get_environment_str(env):
   return reduce(lambda str,x: '{0} {1}={2}'.format(str,x,quote_bash_args(env[x])), env, '')
 
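Reviewer note: the new add_current_path_to_env helper is what keeps PATH handling consistent across call, as_sudo and as_user. A minimal standalone sketch of its behavior (outside resource_management, names copied from the hunk above):

    import os

    def add_current_path_to_env(env):
        # Reuses the caller's dict when one is given (mutates it in place).
        result = {} if not env else env
        if 'PATH' not in result:
            result['PATH'] = os.environ['PATH']
        # Prepend the current PATH only when its entries are missing.
        if not set(os.environ['PATH'].split(os.pathsep)).issubset(result['PATH'].split(os.pathsep)):
            result['PATH'] = os.pathsep.join([os.environ['PATH'], result['PATH']])
        return result

    # The subset check makes repeated calls idempotent: PATH does not grow.
    once = dict(add_current_path_to_env({'PATH': '/usr/lib/hive/bin'}))
    twice = add_current_path_to_env(dict(once))
    assert once == twice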

+ 14 - 13
ambari-common/src/main/python/resource_management/libraries/functions/check_process_status.py

@@ -23,6 +23,7 @@ Ambari Agent
 from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.logger import Logger
 from resource_management.core import shell
+from resource_management.core import sudo
 __all__ = ["check_process_status"]
 
 import os
@@ -38,17 +39,17 @@ def check_process_status(pid_file):
   """
   if not pid_file or not os.path.isfile(pid_file):
     raise ComponentIsNotRunning()
-  with open(pid_file, "r") as f:
-    try:
-      pid = int(f.read())
-    except:
-      Logger.debug("Pid file {0} does not exist".format(pid_file))
-      raise ComponentIsNotRunning()
-
-    code, out = shell.call(["ps","-p", str(pid)])
-    
-    if code:
-      Logger.debug("Process with pid {0} is not running. Stale pid file"
-                " at {1}".format(pid, pid_file))
-      raise ComponentIsNotRunning()
+  
+  try:
+    pid = int(sudo.read_file(pid_file))
+  except:
+    Logger.debug("Pid file {0} does not exist".format(pid_file))
+    raise ComponentIsNotRunning()
+
+  code, out = shell.call(["ps","-p", str(pid)])
+  
+  if code:
+    Logger.debug("Process with pid {0} is not running. Stale pid file"
+              " at {1}".format(pid, pid_file))
+    raise ComponentIsNotRunning()
   pass
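Reviewer note: reading the pid file through sudo.read_file instead of open() is what lets a non-root agent inspect pid files owned by the service user; the surrounding logic is unchanged. A hypothetical status() implementation in a stack script still calls it the same way:

    from resource_management.libraries.functions.check_process_status import check_process_status

    def status(env):
        # Raises ComponentIsNotRunning when the pid file is absent, unreadable,
        # or names a process that `ps -p <pid>` no longer finds.
        check_process_status('/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid')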

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py

@@ -45,7 +45,7 @@ def hcat_service_check():
             tries=3,
             user=params.smokeuser,
             try_sleep=5,
-            path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
             logoutput=True)
 
     if params.security_enabled:
@@ -74,6 +74,6 @@ def hcat_service_check():
             tries=3,
             user=params.smokeuser,
             try_sleep=5,
-            path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
             logoutput=True
     )

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_service.py

@@ -45,7 +45,7 @@ def hbase_service(
         user = params.hbase_user,
         # BUGFIX: hbase regionserver sometimes hangs when nn is in safemode
         timeout = 30,
-        on_timeout = format("{no_op_test} && kill -9 `cat {pid_file}`")
+        on_timeout = format("! ( {no_op_test} ) || sudo -H -E kill -9 `cat {pid_file}`"),
       )
       
       Execute (format("rm -f {pid_file}"))
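Reviewer note: the guard inversion matters for exit codes. The old `{no_op_test} && kill ...` form returned nonzero when the process was already gone, so a clean timeout path looked like a failure; `! ( {no_op_test} ) || sudo -H -E kill -9 ...` returns zero in that case and escalates through sudo only when the kill is actually needed. A rough sketch of the resulting resource, as it would appear inside the service script (pid path assumed):

    from resource_management.core.resources.system import Execute
    from resource_management.libraries.functions.format import format

    pid_file = '/var/run/hbase/hbase-hbase-master.pid'  # assumed value
    no_op_test = format('ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1')

    Execute('/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop master',
            user='hbase',
            timeout=30,
            # Exits 0 when the daemon is already dead; otherwise kills via
            # sudo, which a non-root agent needs for root-owned processes.
            on_timeout=format('! ( {no_op_test} ) || sudo -H -E kill -9 `cat {pid_file}`'))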

+ 5 - 4
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py

@@ -115,8 +115,8 @@ class NameNode(Script):
     
     
     def startRebalancingProcess(threshold):
-      rebalanceCommand = format('export PATH=$PATH:{hadoop_bin_dir} ; hdfs --config {hadoop_conf_dir} balancer -threshold {threshold}')
-      return ['su','-',params.hdfs_user,'-c', rebalanceCommand]
+      rebalanceCommand = format('hdfs --config {hadoop_conf_dir} balancer -threshold {threshold}')
+      return as_user(rebalanceCommand, params.hdfs_user, env={'PATH': params.hadoop_bin_dir})
     
     command = startRebalancingProcess(threshold)
     
@@ -130,8 +130,9 @@ class NameNode(Script):
     parser = hdfs_rebalance.HdfsParser()
     proc = subprocess.Popen(
                             command, 
-                            stdout=subprocess.PIPE, 
-                            shell=False,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE,
+                            shell=True,
                             close_fds=True,
                             cwd=basedir
                            )
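Reviewer note: as_user() returns one shell string (a `/usr/bin/sudo su ... -c '...'` invocation), not an argv list, so Popen has to switch to shell=True; stderr is now captured as well so rebalancer errors are not lost. In isolation, with an illustrative command string:

    import subprocess

    # Illustrative string of the shape as_user() produces:
    command = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'hdfs balancer -threshold 10'"

    proc = subprocess.Popen(command,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            shell=True,   # required: command is a string, not a list
                            close_fds=True)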

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py

@@ -166,7 +166,7 @@ def service(action=None, name=None, user=None, options="", create_pid_dir=False,
         user = "root"
         
         try:
-          check_process_status()
+          check_process_status(hadoop_secure_dn_pid_file)
           
           custom_export = {
             'HADOOP_SECURE_DN_USER': params.hdfs_user

+ 5 - 4
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/removeMysqlUser.sh

@@ -24,9 +24,10 @@ mysqldservice=$1
 mysqldbuser=$2
 userhost=$3
 myhostname=$(hostname -f)
+sudo_prefix="sudo -H -E"
 
-service $mysqldservice start
+$sudo_prefix service $mysqldservice start
 echo "Removing user $mysqldbuser@$userhost"
-mysql -u root -e "DROP USER '$mysqldbuser'@'$userhost';"
-mysql -u root -e "flush privileges;"
-service $mysqldservice stop
+sudo su mysql -s /bin/bash - -c "mysql -u root -e \"DROP USER '$mysqldbuser'@'$userhost';\""
+sudo su mysql -s /bin/bash - -c "mysql -u root -e \"flush privileges;\""
+$sudo_prefix service $mysqldservice stop

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py

@@ -44,7 +44,7 @@ def hcat_service_check():
             tries=3,
             user=params.smokeuser,
             try_sleep=5,
-            path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin', params.execute_path],
+            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', params.execute_path],
             logoutput=True)
 
     if params.security_enabled:
@@ -75,6 +75,6 @@ def hcat_service_check():
             tries=3,
             user=params.smokeuser,
             try_sleep=5,
-            path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin', params.execute_path],
+            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', params.execute_path],
             logoutput=True
     )

+ 4 - 4
ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py

@@ -39,7 +39,7 @@ class TestServiceCheck(RMFTestCase):
     )
     self.assertResourceCalled('Execute', 'sh /tmp/hcatSmoke.sh hcatsmoke prepare',
                         logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+                        path = ['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
                         tries = 3,
                         user = 'ambari-qa',
                         try_sleep = 5,
@@ -54,7 +54,7 @@ class TestServiceCheck(RMFTestCase):
     )
     self.assertResourceCalled('Execute', 'sh /tmp/hcatSmoke.sh hcatsmoke cleanup',
                         logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+                        path = ['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
                         tries = 3,
                         user = 'ambari-qa',
                         try_sleep = 5,
@@ -94,7 +94,7 @@ class TestServiceCheck(RMFTestCase):
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; sh /tmp/hcatSmoke.sh hcatsmoke prepare',
                         logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+                        path = ['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
                         tries = 3,
                         user = 'ambari-qa',
                         try_sleep = 5,
@@ -110,7 +110,7 @@ class TestServiceCheck(RMFTestCase):
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; sh /tmp/hcatSmoke.sh hcatsmoke cleanup',
                         logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+                        path = ['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
                         tries = 3,
                         user = 'ambari-qa',
                         try_sleep = 5,

+ 4 - 2
ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py

@@ -21,6 +21,8 @@ limitations under the License.
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
 import resource_management.core.source
+import os
+
 
 class TestFlumeHandler(RMFTestCase):
 
@@ -52,7 +54,7 @@ class TestFlumeHandler(RMFTestCase):
     self.assertTrue(set_desired_mock.call_args[0][0] == 'STARTED')
 
 
-    self.assertResourceCalled('Execute', "/usr/bin/sudo su flume -l -s /bin/bash -c 'export  JAVA_HOME=/usr/jdk64/jdk1.7.0_45 > /dev/null ; /usr/bin/flume-ng agent --name a1 --conf /etc/flume/conf/a1 --conf-file /etc/flume/conf/a1/flume.conf -Dflume.monitoring.type=ganglia -Dflume.monitoring.hosts=c6401.ambari.apache.org:8655' &",
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su flume -l -s /bin/bash -c 'export  PATH=/bin JAVA_HOME=/usr/jdk64/jdk1.7.0_45 > /dev/null ; /usr/bin/flume-ng agent --name a1 --conf /etc/flume/conf/a1 --conf-file /etc/flume/conf/a1/flume.conf -Dflume.monitoring.type=ganglia -Dflume.monitoring.hosts=c6401.ambari.apache.org:8655' &",
         environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         wait_for_finish = False,
     )
@@ -264,7 +266,7 @@ class TestFlumeHandler(RMFTestCase):
     self.assert_configure_many()
 
 
-    self.assertResourceCalled('Execute', "/usr/bin/sudo su flume -l -s /bin/bash -c 'export  JAVA_HOME=/usr/jdk64/jdk1.7.0_45 > /dev/null ; /usr/bin/flume-ng agent --name b1 --conf /etc/flume/conf/b1 --conf-file /etc/flume/conf/b1/flume.conf -Dflume.monitoring.type=ganglia -Dflume.monitoring.hosts=c6401.ambari.apache.org:8655' &",
+    self.assertResourceCalled('Execute', "/usr/bin/sudo su flume -l -s /bin/bash -c 'export  PATH=/bin JAVA_HOME=/usr/jdk64/jdk1.7.0_45 > /dev/null ; /usr/bin/flume-ng agent --name b1 --conf /etc/flume/conf/b1 --conf-file /etc/flume/conf/b1/flume.conf -Dflume.monitoring.type=ganglia -Dflume.monitoring.hosts=c6401.ambari.apache.org:8655' &",
         environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         wait_for_finish = False,
     )

+ 7 - 7
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py

@@ -54,9 +54,9 @@ class TestHBaseMaster(RMFTestCase):
     )
     
     self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop master',
-      user = 'hbase',
-      on_timeout = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1 && kill -9 `cat /var/run/hbase/hbase-hbase-master.pid`', 
-      timeout = 30,
+        on_timeout = '! ( ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1 ) || sudo -H -E kill -9 `cat /var/run/hbase/hbase-hbase-master.pid`',
+        timeout = 30,
+        user = 'hbase',
     )
     
     self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/hbase-hbase-master.pid',
@@ -141,11 +141,11 @@ class TestHBaseMaster(RMFTestCase):
     )
 
     self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop master',
-      user = 'hbase',
-      on_timeout = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1 && kill -9 `cat /var/run/hbase/hbase-hbase-master.pid`', 
-      timeout = 30,
+        on_timeout = '! ( ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1 ) || sudo -H -E kill -9 `cat /var/run/hbase/hbase-hbase-master.pid`',
+        timeout = 30,
+        user = 'hbase',
     )
-    
+
     self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/hbase-hbase-master.pid',
     )
     self.assertNoMoreResources()

+ 6 - 6
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py

@@ -54,9 +54,9 @@ class TestHbaseRegionServer(RMFTestCase):
     )
     
     self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop regionserver',
-      user = 'hbase',
-      on_timeout = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1 && kill -9 `cat /var/run/hbase/hbase-hbase-regionserver.pid`', 
-      timeout = 30,
+        on_timeout = '! ( ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1 ) || sudo -H -E kill -9 `cat /var/run/hbase/hbase-hbase-regionserver.pid`',
+        timeout = 30,
+        user = 'hbase',
     )
     
     self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/hbase-hbase-regionserver.pid',
@@ -95,9 +95,9 @@ class TestHbaseRegionServer(RMFTestCase):
     )
 
     self.assertResourceCalled('Execute', '/usr/lib/hbase/bin/hbase-daemon.sh --config /etc/hbase/conf stop regionserver',
-      user = 'hbase',
-      on_timeout = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1 && kill -9 `cat /var/run/hbase/hbase-hbase-regionserver.pid`', 
-      timeout = 30,
+        on_timeout = '! ( ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1 ) || sudo -H -E kill -9 `cat /var/run/hbase/hbase-hbase-regionserver.pid`',
+        timeout = 30,
+        user = 'hbase',
     )
     
     self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/hbase-hbase-regionserver.pid',

+ 24 - 24
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py

@@ -300,11 +300,11 @@ class TestNamenode(RMFTestCase):
         not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
     )
     self.assertResourceCalled('Execute', "hadoop dfsadmin -safemode get | grep 'Safe mode is OFF'",
-                              path = ['/usr/bin'],
-                              tries = 40,
-                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/usr/bin > /dev/null ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
-                              user = 'hdfs',
-                              try_sleep = 10,
+        path = ['/usr/bin'],
+        tries = 40,
+        only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin > /dev/null ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+        user = 'hdfs',
+        try_sleep = 10,
     )
     self.assertResourceCalled('HdfsDirectory', '/tmp',
                               security_enabled = False,
@@ -329,15 +329,15 @@ class TestNamenode(RMFTestCase):
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              bin_dir = '/usr/bin',
-                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/usr/bin > /dev/null ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
-                              )
+        security_enabled = False,
+        keytab = UnknownConfigurationMock(),
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
+        kinit_path_local = '/usr/bin/kinit',
+        action = ['create'],
+        bin_dir = '/usr/bin',
+        only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin > /dev/null ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+    )
     self.assertNoMoreResources()
 
   def test_start_ha_secured(self):
@@ -379,7 +379,7 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('Execute', "hadoop dfsadmin -safemode get | grep 'Safe mode is OFF'",
         path = ['/usr/bin'],
         tries = 40,
-        only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/usr/bin > /dev/null ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+        only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin > /dev/null ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
         user = 'hdfs',
         try_sleep = 10,
     )
@@ -406,15 +406,15 @@ class TestNamenode(RMFTestCase):
                               action = ['create_delayed'],
                               )
     self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              bin_dir = '/usr/bin',
-                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/usr/bin > /dev/null ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
-                             )
+        security_enabled = True,
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
+        kinit_path_local = '/usr/bin/kinit',
+        action = ['create'],
+        bin_dir = '/usr/bin',
+        only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin > /dev/null ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+    )
     self.assertNoMoreResources()
 
   def test_decommission_default(self):

+ 2 - 2
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py

@@ -42,7 +42,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
         not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
         environment = {'HADOOP_HOME' : '/usr'},
-        path = [os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
+        path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
         user = 'hive',
     )
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -87,7 +87,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
         not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
         environment = {'HADOOP_HOME' : '/usr'},
-        path = [os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
+        path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
         user = 'hive',
     )
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',

+ 3 - 3
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py

@@ -51,13 +51,13 @@ class TestHiveServer(RMFTestCase):
 
     self.assert_configure_default()
     self.assertResourceCalled('Execute', 'metatool -updateLocation hdfs://c6401.ambari.apache.org:8020/apps/hive/warehouse ',
-        environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"},
+        environment = {'PATH' : "/bin:/usr/lib/hive/bin:/usr/bin"},
         user = 'hive',
     )
     self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
                               environment = {'HADOOP_HOME' : '/usr'},
-                              path = [os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
+                              path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                               user = 'hive'
     )
 
@@ -112,7 +112,7 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
                               environment = {'HADOOP_HOME' : '/usr'},
-                              path = [os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
+                              path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                               user = 'hive'
     )
 

+ 47 - 35
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py

@@ -39,27 +39,35 @@ class TestServiceCheck(RMFTestCase):
                         mode = 0755,
     )
     self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/hcatSmoke.sh hcatsmoke prepare',
-                        logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin', os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
-                        tries = 3,
-                        user = 'ambari-qa',
-                        try_sleep = 5,
+        logoutput = True,
+        path = ['/usr/sbin',
+           '/usr/local/bin',
+           '/bin',
+           '/usr/bin',
+           '/bin:/usr/lib/hive/bin:/usr/bin'],
+        tries = 3,
+        user = 'ambari-qa',
+        try_sleep = 5,
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
-                        logoutput = True,
-                        user = 'hdfs',
-                        conf_dir = '/etc/hadoop/conf',
-                        keytab=UnknownConfigurationMock(),
-                        kinit_path_local='/usr/bin/kinit',
-                        bin_dir = os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin",
-                        security_enabled=False
+        security_enabled = False,
+        keytab = UnknownConfigurationMock(),
+        conf_dir = '/etc/hadoop/conf',
+        logoutput = True,
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        bin_dir = '/bin:/usr/lib/hive/bin:/usr/bin',
     )
     self.assertResourceCalled('Execute', ' /tmp/hcatSmoke.sh hcatsmoke cleanup',
-                        logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin', os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
-                        tries = 3,
-                        user = 'ambari-qa',
-                        try_sleep = 5,
+        logoutput = True,
+        path = ['/usr/sbin',
+           '/usr/local/bin',
+           '/bin',
+           '/usr/bin',
+           '/bin:/usr/lib/hive/bin:/usr/bin'],
+        tries = 3,
+        user = 'ambari-qa',
+        try_sleep = 5,
     )
     self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
                               content = StaticFile('templetonSmoke.sh'),
@@ -96,28 +104,32 @@ class TestServiceCheck(RMFTestCase):
     )
     self.maxDiff = None
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/hcatSmoke.sh hcatsmoke prepare',
-                        logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin', os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
-                        tries = 3,
-                        user = 'ambari-qa',
-                        try_sleep = 5,
+        logoutput = True,
+        path = ['/usr/sbin','/usr/local/bin','/bin','/usr/bin', '/bin:/usr/lib/hive/bin:/usr/bin'],
+        tries = 3,
+        user = 'ambari-qa',
+        try_sleep = 5,
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
-                        logoutput = True,
-                        user = 'hdfs',
-                        conf_dir = '/etc/hadoop/conf',
-                        keytab='/etc/security/keytabs/hdfs.headless.keytab',
-                        kinit_path_local='/usr/bin/kinit',
-                        security_enabled=True,
-                        bin_dir = os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin",
-                        principal='hdfs'
+        security_enabled = True,
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        conf_dir = '/etc/hadoop/conf',
+        logoutput = True,
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        bin_dir = '/bin:/usr/lib/hive/bin:/usr/bin',
+        principal = 'hdfs',
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa;  /tmp/hcatSmoke.sh hcatsmoke cleanup',
-                        logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin', os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
-                        tries = 3,
-                        user = 'ambari-qa',
-                        try_sleep = 5,
+        logoutput = True,
+        path = ['/usr/sbin',
+           '/usr/local/bin',
+           '/bin',
+           '/usr/bin',
+           '/bin:/usr/lib/hive/bin:/usr/bin'],
+        tries = 3,
+        user = 'ambari-qa',
+        try_sleep = 5,
     )
     self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
                               content = StaticFile('templetonSmoke.sh'),

+ 8 - 8
ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py

@@ -38,13 +38,13 @@ class TestServiceCheck(RMFTestCase):
                       try_sleep = 5,
                       tries = 1,
                       user = 'ambari-qa',
-                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
+                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
                       conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -put /etc/passwd /user/ambari-qa/mapredsmokeinput',
                       try_sleep = 5,
                       tries = 1,
-                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
+                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
                       user = 'ambari-qa',
                       conf_dir = '/etc/hadoop/conf',
     )
@@ -52,13 +52,13 @@ class TestServiceCheck(RMFTestCase):
                       logoutput = True,
                       try_sleep = 5,
                       tries = 1,
-                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
+                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
                       user = 'ambari-qa',
                       conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/mapredsmokeoutput',
                       user = 'ambari-qa',
-                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
+                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
                       conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
@@ -77,13 +77,13 @@ class TestServiceCheck(RMFTestCase):
                       try_sleep = 5,
                       tries = 1,
                       user = 'ambari-qa',
-                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
+                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
                       conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -put /etc/passwd /user/ambari-qa/mapredsmokeinput',
                       try_sleep = 5,
                       tries = 1,
-                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
+                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
                       user = 'ambari-qa',
                       conf_dir = '/etc/hadoop/conf',
     )
@@ -91,13 +91,13 @@ class TestServiceCheck(RMFTestCase):
                       logoutput = True,
                       try_sleep = 5,
                       tries = 1,
-                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
+                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
                       user = 'ambari-qa',
                       conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/mapredsmokeoutput',
                       user = 'ambari-qa',
-                      bin_dir =  os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin",
+                      bin_dir = "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin",
                       conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()

+ 2 - 2
ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py

@@ -124,7 +124,7 @@ class TestResourceManager(RMFTestCase):
         group = 'hadoop',
     )
     self.assertResourceCalled('Execute', ' yarn --config /etc/hadoop/conf rmadmin -refreshNodes',
-        environment = {'PATH': os.environ['PATH'] + ":/usr/bin:/usr/lib/hadoop-yarn/bin"},
+        environment = {'PATH': "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin"},
         user = 'yarn',
     )
     self.assertNoMoreResources()
@@ -141,7 +141,7 @@ class TestResourceManager(RMFTestCase):
         group = 'hadoop',
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/rm.service.keytab rm/c6401.ambari.apache.org@EXAMPLE.COM; yarn --config /etc/hadoop/conf rmadmin -refreshNodes',
-        environment = {'PATH': os.environ['PATH'] + ":/usr/bin:/usr/lib/hadoop-yarn/bin"},
+        environment = {'PATH': "/bin:/usr/bin:/usr/lib/hadoop-yarn/bin"},
         user = 'yarn',
     )
     

+ 2 - 2
ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_service_check.py

@@ -43,7 +43,7 @@ class TestServiceCheck(RMFTestCase):
                           try_sleep = 5,
     )
     self.assertResourceCalled('Execute', 'yarn --config /etc/hadoop/conf node -list',
-                              path = [os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin"],
+                              path = ["/bin:/usr/bin:/usr/lib/hadoop-yarn/bin"],
                               user = 'ambari-qa',
     )
     self.assertNoMoreResources()
@@ -66,7 +66,7 @@ class TestServiceCheck(RMFTestCase):
                           try_sleep = 5,
     )
     self.assertResourceCalled('Execute', 'yarn --config /etc/hadoop/conf node -list',
-                              path = [os.environ['PATH'] + os.pathsep + "/usr/bin" + os.pathsep + "/usr/lib/hadoop-yarn/bin"],
+                          path = ["/bin:/usr/bin:/usr/lib/hadoop-yarn/bin"],
                           user = 'ambari-qa',
     )
     self.assertNoMoreResources()

+ 2 - 2
ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py

@@ -42,7 +42,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
                               environment = {'HADOOP_HOME': '/usr'},
-                              path = [os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
+                              path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                               user = 'hive'
     )
 
@@ -89,7 +89,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
                               environment = {'HADOOP_HOME' : '/usr'},
-                              path = [os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin" + os.pathsep + "/usr/bin"],
+                              path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                               user = 'hive'
     )
 

+ 3 - 1
ambari-server/src/test/python/stacks/utils/RMFTestCase.py

@@ -57,6 +57,7 @@ class RMFTestCase(TestCase):
                     shell_mock_value = (0, "OK."), 
                     os_type=('Suse','11','Final'),
                     kinit_path_local="/usr/bin/kinit",
+                    os_env={'PATH':'/bin'},
                     target=TARGET_STACKS
                     ):
     norm_path = os.path.normpath(path)
@@ -116,7 +117,8 @@ class RMFTestCase(TestCase):
             with patch.object(Script, 'install_packages'):
               with patch('resource_management.libraries.functions.get_kinit_path', return_value=kinit_path_local):
                 with patch.object(platform, 'linux_distribution', return_value=os_type):
-                  method(RMFTestCase.env)
+                  with patch.object(os, "environ", new=os_env):
+                    method(RMFTestCase.env)
     sys.path.remove(scriptsdir)
   
   def getConfig(self):
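Reviewer note: the new os_env parameter pins os.environ to {'PATH': '/bin'} while the script under test runs, which is why the expected values above change from expressions built on os.environ['PATH'] to fixed strings such as '/bin:/usr/lib/hive/bin:/usr/bin'. The mechanism in isolation:

    import os
    from mock import patch

    # Patching os.environ with a fixed dict makes every PATH lookup inside
    # the tested code deterministic, independent of the developer's shell.
    with patch.object(os, 'environ', new={'PATH': '/bin'}):
        assert os.environ['PATH'] == '/bin'
    # The real environment is restored when the context manager exits.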

+ 9 - 9
ambari-web/app/messages.js

@@ -1079,9 +1079,9 @@ Em.I18n.translations = {
     '<ol>' +
       '<li>Login to the NameNode host <b>{1}</b>.</li>' +
       '<li>Put the NameNode in Safe Mode (read-only mode):' +
-      '<div class="code-snippet">sudo su -l {0} -c \'hdfs dfsadmin -safemode enter\'</div></li>' +
+      '<div class="code-snippet">sudo su {0} -l -c \'hdfs dfsadmin -safemode enter\'</div></li>' +
       '<li>Once in Safe Mode, create a Checkpoint:' +
-      '<div class="code-snippet">sudo su -l {0} -c \'hdfs dfsadmin -saveNamespace\'</div></li>' +
+      '<div class="code-snippet">sudo su {0} -l -c \'hdfs dfsadmin -saveNamespace\'</div></li>' +
       '</ol>',
 
   'admin.highAvailability.wizard.step8.body':
@@ -1089,7 +1089,7 @@ Em.I18n.translations = {
     '<ol>' +
     '<li>Login to the NameNode host <b>{1}</b>.</li>' +
     '<li>Initialize the metadata for NameNode automatic failover by running:' +
-    '<div class="code-snippet">sudo su -l {0} -c \'hdfs zkfc -formatZK\'</div></li>' +
+    '<div class="code-snippet">sudo su {0} -l -c \'hdfs zkfc -formatZK\'</div></li>' +
     '</div>' +
     '<div class="alert alert-info">' +
     '<ol start="3">' +
@@ -1097,7 +1097,7 @@ Em.I18n.translations = {
     '<div class="alert alert-warn"><strong>Important!</strong> Be sure to login to the Additional NameNode host.<br>This is a different host from the Steps 1 and 2 above.</div>' +
     '</li>' +
     '<li>Initialize the metadata for the Additional NameNode by running:' +
-    '<div class="code-snippet">sudo su -l {0} -c \'hdfs namenode -bootstrapStandby\'</div></li>' +
+    '<div class="code-snippet">sudo su {0} -l -c \'hdfs namenode -bootstrapStandby\'</div></li>' +
     '</ol>' +
     '</div>' +
     'Please proceed once you have completed the steps above.',
@@ -1105,16 +1105,16 @@ Em.I18n.translations = {
     '<ol>' +
     '<li>Login to the NameNode host <b>{1}</b>.</li>' +
     '<li>Initialize the JournalNodes by running:' +
-    '<div class="code-snippet">sudo su -l {0} -c \'hdfs namenode -initializeSharedEdits\'</div></li>' +
+    '<div class="code-snippet">sudo su {0} -l -c \'hdfs namenode -initializeSharedEdits\'</div></li>' +
     '<li>You will be able to proceed once Ambari detects that the JournalNodes have been initialized successfully.</li>' +
     '</ol>',
   'admin.highAvailability.wizard.step4.body':
     '<ol>' +
     '<li>Login to the NameNode host <b>{1}</b>.</li>' +
     '<li>Put the NameNode in Safe Mode (read-only mode):' +
-    '<div class="code-snippet">sudo su -l {0} -c \'hdfs dfsadmin -safemode enter\'</div></li>' +
+    '<div class="code-snippet">sudo su {0} -l -c \'hdfs dfsadmin -safemode enter\'</div></li>' +
     '<li>Once in Safe Mode, create a Checkpoint:' +
-    '<div class="code-snippet">sudo su -l {0} -c \'hdfs dfsadmin -saveNamespace\'</div></li>' +
+    '<div class="code-snippet">sudo su {0} -l -c \'hdfs dfsadmin -saveNamespace\'</div></li>' +
     '<li>You will be able to proceed once Ambari detects that the NameNode is in Safe Mode and the Checkpoint has been created successfully.</li>'+
     '<div class="alert alert-warn">If the <b>Next</b> button is enabled before you run the <b>"Step 3: Create a Checkpoint"</b> command, it means there is a recent Checkpoint already and you may proceed without running the <b>"Step 3: Create a Checkpoint"</b> command.</div>' +
     '</ol>',
@@ -1748,7 +1748,7 @@ Em.I18n.translations = {
       '<ol>' +
       '<li>Login to the NameNode host <b>{4}</b>.</li>' +
       '<li>Reset automatic failover information in ZooKeeper by running:' +
-      '<div class="code-snippet">sudo su -l {3} -c \'hdfs zkfc -formatZK\'</div></li>' +
+      '<div class="code-snippet">sudo su {3} -l -c \'hdfs zkfc -formatZK\'</div></li>' +
       '</ol>' +
       '</div>' +
       '<div class="alert alert-info">' +
@@ -1757,7 +1757,7 @@ Em.I18n.translations = {
       '<div class="alert alert-warn"><strong>Important!</strong> Be sure to login to the newly installed NameNode host.<br>This is a different host from the Steps 1 and 2 above.</div>' +
       '</li>' +
       '<li>Initialize the metadata by running:' +
-      "<div class='code-snippet'>sudo su -l {3} -c 'hdfs namenode -bootstrapStandby'</div></li>" +
+      "<div class='code-snippet'>sudo su {3} -l -c 'hdfs namenode -bootstrapStandby'</div></li>" +
       '</ol>' +
       '</div>',
   'services.reassign.step5.body.secondary_namenode':