
AMBARI-6408. Ability to customize /tmp usage for ambari (mahadev via aonishuk)

Andrew Onishuk, 11 years ago
parent commit: e3a93a1db0
47 changed files with 120 additions and 75 deletions
  1. ambari-agent/conf/unix/ambari-agent.ini (+1 -0)
  2. ambari-agent/pom.xml (+6 -0)
  3. ambari-agent/src/main/python/ambari_agent/AmbariConfig.py (+1 -0)
  4. ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py (+2 -1)
  5. ambari-agent/src/main/python/ambari_agent/PythonExecutor.py (+2 -2)
  6. ambari-agent/src/main/python/resource_management/libraries/script/script.py (+15 -4)
  7. ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py (+5 -3)
  8. ambari-agent/src/test/python/ambari_agent/TestPythonExecutor.py (+3 -3)
  9. ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py (+1 -0)
  10. ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py (+4 -2)
  11. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py (+1 -0)
  12. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/service_check.py (+4 -4)
  13. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py (+2 -2)
  14. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py (+1 -0)
  15. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/service_check.py (+2 -2)
  16. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py (+3 -3)
  17. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py (+6 -5)
  18. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py (+1 -0)
  19. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/service_check.py (+4 -4)
  20. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/params.py (+1 -0)
  21. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/service_check.py (+2 -2)
  22. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py (+1 -0)
  23. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/service_check.py (+2 -2)
  24. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/scripts/params.py (+1 -0)
  25. ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/scripts/service_check.py (+2 -2)
  26. ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py (+1 -0)
  27. ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py (+4 -2)
  28. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py (+1 -0)
  29. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/service_check.py (+4 -4)
  30. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py (+2 -2)
  31. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py (+1 -0)
  32. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py (+2 -2)
  33. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py (+3 -3)
  34. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py (+6 -5)
  35. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py (+1 -0)
  36. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/service_check.py (+4 -4)
  37. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py (+1 -0)
  38. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/service_check.py (+2 -2)
  39. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py (+1 -0)
  40. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/service_check.py (+2 -2)
  41. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py (+1 -0)
  42. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py (+1 -1)
  43. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py (+1 -0)
  44. ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/service_check.py (+2 -2)
  45. ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py (+1 -0)
  46. ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/service_check.py (+1 -1)
  47. ambari-server/src/test/python/stacks/utils/RMFTestCase.py (+5 -4)

+ 1 - 0
ambari-agent/conf/unix/ambari-agent.ini

@@ -19,6 +19,7 @@ secured_url_port=8441
 
 [agent]
 prefix=/var/lib/ambari-agent/data
+tmp_dir=/var/lib/ambari-agent/data/tmp
 ;loglevel=(DEBUG/INFO)
 loglevel=INFO
 data_cleanup_interval=86400
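
For context, a minimal sketch (not part of the commit) of reading the new key with Python 2's ConfigParser; the section and key names come from the ini above, and the config path assumes the agent's usual install location:

    import ConfigParser  # Python 2 stdlib, as used by the agent

    config = ConfigParser.RawConfigParser()
    config.read("/etc/ambari-agent/conf/ambari-agent.ini")
    exec_tmp_dir = config.get('agent', 'tmp_dir')  # -> /var/lib/ambari-agent/data/tmp

This is the lookup CustomServiceOrchestrator performs below via config.get('agent', 'tmp_dir').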

+ 6 - 0
ambari-agent/pom.xml

@@ -326,6 +326,12 @@
               <username>root</username>
               <groupname>root</groupname>
             </mapping>
+            <mapping>
+              <directory>/var/lib/${project.artifactId}/data/tmp</directory>
+              <filemode>755</filemode>
+              <username>root</username>
+              <groupname>root</groupname>
+            </mapping>
             <mapping>
               <directory>/var/lib/${project.artifactId}/keys</directory>
               <filemode>755</filemode>

+ 1 - 0
ambari-agent/src/main/python/ambari_agent/AmbariConfig.py

@@ -31,6 +31,7 @@ secured_url_port=8441
 
 [agent]
 prefix=/tmp/ambari-agent
+tmp_dir=/tmp/ambari-agent/tmp # For test purposes
 data_cleanup_interval=86400
 data_cleanup_max_age=2592000
 ping_port=8670

+ 2 - 1
ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py

@@ -52,6 +52,7 @@ class CustomServiceOrchestrator():
   def __init__(self, config, controller):
     self.config = config
     self.tmp_dir = config.get('agent', 'prefix')
+    self.exec_tmp_dir = config.get('agent', 'tmp_dir')
     self.file_cache = FileCache(config)
     self.python_executor = PythonExecutor(self.tmp_dir, config)
     self.status_commands_stdout = os.path.join(self.tmp_dir,
@@ -130,7 +131,7 @@ class CustomServiceOrchestrator():
       for py_file, current_base_dir in filtered_py_file_list:
         script_params = [command_name, json_path, current_base_dir]
         ret = self.python_executor.run_file(py_file, script_params,
-                               tmpoutfile, tmperrfile, timeout,
+                               self.exec_tmp_dir, tmpoutfile, tmperrfile, timeout,
                                tmpstrucoutfile, logger_level, override_output_files)
         # Next run_file() invocations should always append to current output
         override_output_files = False
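
Pulled out of the hunk above for readability, the updated call shape (a sketch using the same names as the diff):

    ret = self.python_executor.run_file(
        py_file, script_params,   # [command_name, json_path, current_base_dir]
        self.exec_tmp_dir,        # new third positional argument
        tmpoutfile, tmperrfile, timeout,
        tmpstrucoutfile, logger_level, override_output_files)

This is also why the TestCustomServiceOrchestrator assertions below shift from positional index 7 to 8: every argument after the inserted tmp_dir moves right by one.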

+ 2 - 2
ambari-agent/src/main/python/ambari_agent/PythonExecutor.py

@@ -47,7 +47,7 @@ class PythonExecutor:
     self.config = config
     pass
 
-  def run_file(self, script, script_params, tmpoutfile, tmperrfile, timeout,
+  def run_file(self, script, script_params, tmp_dir, tmpoutfile, tmperrfile, timeout,
                tmpstructedoutfile, logger_level, override_output_files = True):
     """
     Executes the specified python file in a separate subprocess.
@@ -73,7 +73,7 @@ class PythonExecutor:
     except OSError:
       pass # no error
 
-    script_params += [tmpstructedoutfile, logger_level]
+    script_params += [tmpstructedoutfile, logger_level, tmp_dir]
     pythonCommand = self.python_command(script, script_params)
     logger.info("Running command " + pprint.pformat(pythonCommand))
     process = self.launch_python_subprocess(pythonCommand, tmpout, tmperr)
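
With tmp_dir appended after the structured-output path and log level, the child argv ends up in exactly the order the USAGE string in script.py (next file) documents:

    # resulting subprocess argv (sketch; names as in the diffs):
    #   [python, script,
    #    command_name, json_path, current_base_dir,   # passed in by the orchestrator
    #    tmpstructedoutfile, logger_level, tmp_dir]   # appended here
    # i.e. <COMMAND> <JSON_CONFIG> <BASEDIR> <STROUTPUT> <LOGGING_LEVEL> <TMP_DIR>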

+ 15 - 4
ambari-agent/src/main/python/resource_management/libraries/script/script.py

@@ -30,13 +30,14 @@ from resource_management.core.exceptions import Fail, ClientComponentHasNoStatus
 from resource_management.core.resources.packaging import Package
 from resource_management.libraries.script.config_dictionary import ConfigDictionary
 
-USAGE = """Usage: {0} <COMMAND> <JSON_CONFIG> <BASEDIR> <STROUTPUT> <LOGGING_LEVEL>
+USAGE = """Usage: {0} <COMMAND> <JSON_CONFIG> <BASEDIR> <STROUTPUT> <LOGGING_LEVEL> <TMP_DIR>
 
 <COMMAND> command type (INSTALL/CONFIGURE/START/STOP/SERVICE_CHECK...)
 <JSON_CONFIG> path to command json file. Ex: /var/lib/ambari-agent/data/command-2.json
 <BASEDIR> path to service metadata dir. Ex: /var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HDFS
 <STROUTPUT> path to file with structured command output (file will be created). Ex:/tmp/my.txt
 <LOGGING_LEVEL> log level for stdout. Ex:DEBUG,INFO
+<TMP_DIR> temporary directory for executable scripts. Ex: /var/lib/ambari-agent/data/tmp
 """
 
 class Script(object):
@@ -82,8 +83,8 @@ class Script(object):
     logger.addHandler(chout)
     
     # parse arguments
-    if len(sys.argv) < 6: 
-     logger.error("Script expects at least 5 arguments")
+    if len(sys.argv) < 7: 
+     logger.error("Script expects at least 6 arguments")
      print USAGE.format(os.path.basename(sys.argv[0])) # print to stdout
      sys.exit(1)
     
@@ -92,7 +93,8 @@ class Script(object):
     basedir = sys.argv[3]
     self.stroutfile = sys.argv[4]
     logging_level = sys.argv[5]
-    
+    Script.tmp_dir = sys.argv[6]
+
     logging_level_str = logging._levelNames[logging_level]
     chout.setLevel(logging_level_str)
     logger.setLevel(logging_level_str)
@@ -139,6 +141,15 @@ class Script(object):
     return Script.config
 
 
+  @staticmethod
+  def get_tmp_dir():
+    """
+    HACK. Uses static field to avoid "circular dependency" issue when
+    importing params.py.
+    """
+    return Script.tmp_dir
+
+
   def install(self, env):
     """
     Default implementation of install command is to install all packages
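
Every stack params.py touched by this commit then consumes the value the same way. A minimal sketch of the pattern (the smoke-file name is illustrative, not from the commit):

    from resource_management import *  # provides Script and format()

    config = Script.get_config()
    tmp_dir = Script.get_tmp_dir()     # populated from sys.argv[6] above

    # paths that were hardcoded under /tmp now derive from tmp_dir:
    smoke_file = format("{tmp_dir}/mySmoke.sh")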

+ 5 - 3
ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py

@@ -47,9 +47,11 @@ class TestCustomServiceOrchestrator(TestCase):
     sys.stdout = out
     # generate sample config
     tmpdir = tempfile.gettempdir()
+    exec_tmp_dir = os.path.join(tmpdir, 'tmp')
     self.config = ConfigParser.RawConfigParser()
     self.config.add_section('agent')
     self.config.set('agent', 'prefix', tmpdir)
+    self.config.set('agent', 'tmp_dir', exec_tmp_dir)
     self.config.set('agent', 'cache_dir', "/cachedir")
     self.config.add_section('python')
     self.config.set('python', 'custom_actions_dir', tmpdir)
@@ -206,9 +208,9 @@ class TestCustomServiceOrchestrator(TestCase):
     ret = orchestrator.runCommand(command, "out.txt", "err.txt",
               forced_command_name=CustomServiceOrchestrator.COMMAND_NAME_STATUS)
     ## Check that override_output_files was true only during first call
-    self.assertEquals(run_file_mock.call_args_list[0][0][7], True)
-    self.assertEquals(run_file_mock.call_args_list[1][0][7], False)
-    self.assertEquals(run_file_mock.call_args_list[2][0][7], False)
+    self.assertEquals(run_file_mock.call_args_list[0][0][8], True)
+    self.assertEquals(run_file_mock.call_args_list[1][0][8], False)
+    self.assertEquals(run_file_mock.call_args_list[2][0][8], False)
     ## Check that forced_command_name was taken into account
     self.assertEqual(run_file_mock.call_args_list[0][0][1][0],
                                   CustomServiceOrchestrator.COMMAND_NAME_STATUS)

+ 3 - 3
ambari-agent/src/test/python/ambari_agent/TestPythonExecutor.py

@@ -56,7 +56,7 @@ class TestPythonExecutor(TestCase):
     executor.runShellKillPgrp = runShellKillPgrp_method
     subproc_mock.returncode = None
     thread = Thread(target =  executor.run_file, args = ("fake_puppetFile",
-      ["arg1", "arg2"], tmpoutfile, tmperrfile, PYTHON_TIMEOUT_SECONDS, tmpstrucout,"INFO"))
+      ["arg1", "arg2"], "/fake_tmp_dir", tmpoutfile, tmperrfile, PYTHON_TIMEOUT_SECONDS, tmpstrucout,"INFO"))
     thread.start()
     time.sleep(0.1)
     subproc_mock.finished_event.wait()
@@ -84,7 +84,7 @@ class TestPythonExecutor(TestCase):
     executor.runShellKillPgrp = runShellKillPgrp_method
     subproc_mock.returncode = 0
     thread = Thread(target =  executor.run_file, args = ("fake_puppetFile", ["arg1", "arg2"],
-                                                      tmpoutfile, tmperrfile,
+                                                      "/fake_tmp_dir", tmpoutfile, tmperrfile,
                                                       PYTHON_TIMEOUT_SECONDS, tmpstrucout, "INFO"))
     thread.start()
     time.sleep(0.1)
@@ -112,7 +112,7 @@ class TestPythonExecutor(TestCase):
     executor.runShellKillPgrp = runShellKillPgrp_method
     subproc_mock.returncode = 0
     subproc_mock.should_finish_event.set()
-    result = executor.run_file("file", ["arg1", "arg2"], tmpoutfile, tmperrfile, PYTHON_TIMEOUT_SECONDS, tmpstroutfile, "INFO")
+    result = executor.run_file("file", ["arg1", "arg2"], "/fake_tmp_dir", tmpoutfile, tmperrfile, PYTHON_TIMEOUT_SECONDS, tmpstroutfile, "INFO")
     self.assertEquals(result, {'exitcode': 0, 'stderr': 'Dummy err', 'stdout': 'Dummy output',
                                'structuredOut': {}})
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py

@@ -22,6 +22,7 @@ from resource_management.core.system import System
 import os
 
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 #java params
 artifact_dir = "/tmp/HDP-artifacts/"

+ 4 - 2
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -125,10 +125,12 @@ def set_uid(user, user_dirs):
   """
   user_dirs - comma separated directories
   """
-  File("/tmp/changeUid.sh",
+  import params
+
+  File(format("{tmp_dir}/changeUid.sh"),
        content=StaticFile("changeToSecureUid.sh"),
        mode=0555)
-  Execute(format("/tmp/changeUid.sh {user} {user_dirs} 2>/dev/null"),
+  Execute(format("{tmp_dir}/changeUid.sh {user} {user_dirs} 2>/dev/null"),
           not_if = format("test $(id -u {user}) -gt 1000"))
 
 def setup_java():
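
The function-level "import params" added here is presumably what lets format() resolve {tmp_dir}, since resource_management's format() pulls placeholder values from the calling context. The net effect of the hunk:

    # before: File("/tmp/changeUid.sh", ...)               # fixed, world-shared /tmp
    # after:  File(format("{tmp_dir}/changeUid.sh"), ...)  # honors the agent's tmp_dir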

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py

@@ -24,6 +24,7 @@ import status_params
 
 # server configurations
 config = Script.get_config()
+exec_tmp_dir = Script.get_tmp_dir()
 
 hbase_conf_dir = "/etc/hbase/conf"
 daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"

+ 4 - 4
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/service_check.py

@@ -30,9 +30,9 @@ class HbaseServiceCheck(Script):
     output_file = "/apps/hbase/data/ambarismoketest"
     test_cmd = format("fs -test -e {output_file}")
     smokeuser_kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smoke_test_user};") if params.security_enabled else ""
-    hbase_servicecheck_file = '/tmp/hbase-smoke.sh'
+    hbase_servicecheck_file = format("{exec_tmp_dir}/hbase-smoke.sh")
   
-    File( '/tmp/hbaseSmokeVerify.sh',
+    File( format("{exec_tmp_dir}/hbaseSmokeVerify.sh"),
       content = StaticFile("hbaseSmokeVerify.sh"),
       mode = 0755
     )
@@ -43,7 +43,7 @@ class HbaseServiceCheck(Script):
     )
     
     if params.security_enabled:    
-      hbase_grant_premissions_file = '/tmp/hbase_grant_permissions.sh'
+      hbase_grant_premissions_file = format("{exec_tmp_dir}/hbase_grant_permissions.sh")
       grantprivelegecmd = format("{kinit_cmd} hbase shell {hbase_grant_premissions_file}")
   
       File( hbase_grant_premissions_file,
@@ -58,7 +58,7 @@ class HbaseServiceCheck(Script):
       )
 
     servicecheckcmd = format("{smokeuser_kinit_cmd} hbase --config {hbase_conf_dir} shell {hbase_servicecheck_file}")
-    smokeverifycmd = format("{smokeuser_kinit_cmd} /tmp/hbaseSmokeVerify.sh {hbase_conf_dir} {service_check_data}")
+    smokeverifycmd = format("{smokeuser_kinit_cmd} {exec_tmp_dir}/hbaseSmokeVerify.sh {hbase_conf_dir} {service_check_data}")
   
     Execute( servicecheckcmd,
       tries     = 3,
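
The same staging pattern repeats in every service_check touched below: materialize the bundled smoke script into the configurable temp dir, then run it from there. Distilled (the script name is illustrative):

    File(format("{exec_tmp_dir}/someSmoke.sh"),  # hypothetical name
         content=StaticFile("someSmoke.sh"),
         mode=0755)
    Execute(format("{exec_tmp_dir}/someSmoke.sh"),
            tries=3,
            try_sleep=5)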

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py

@@ -95,11 +95,11 @@ def format_namenode(force=None):
       ExecuteHadoop('namenode -format',
                     kinit_override=True)
     else:
-      File('/tmp/checkForFormat.sh',
+      File(format("{tmp_dir}/checkForFormat.sh"),
            content=StaticFile("checkForFormat.sh"),
            mode=0755)
       Execute(format(
-        "sh /tmp/checkForFormat.sh {hdfs_user} {hadoop_conf_dir} {mark_dir} "
+        "sh {tmp_dir}/checkForFormat.sh {hdfs_user} {hadoop_conf_dir} {mark_dir} "
         "{dfs_name_dir}"),
               not_if=format("test -d {mark_dir}"),
               path="/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin")

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py

@@ -22,6 +22,7 @@ import status_params
 import os
 
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 if System.get_instance().os_type == "oraclelinux":
   ulimit_cmd = ''

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/service_check.py

@@ -26,7 +26,7 @@ class HdfsServiceCheck(Script):
 
     env.set_params(params)
     unique = functions.get_unique_id_and_date()
-    dir = '/tmp'
+    dir = params.tmp_dir
     tmp_file = format("{dir}/{unique}")
 
     safemode_command = "dfsadmin -safemode get | grep OFF"
@@ -76,7 +76,7 @@ class HdfsServiceCheck(Script):
       journalnode_port = params.journalnode_port
       smoke_test_user = params.smoke_user
       checkWebUIFileName = "checkWebUI.py"
-      checkWebUIFilePath = format("/tmp/{checkWebUIFileName}")
+      checkWebUIFilePath = format("{tmp_dir}/{checkWebUIFileName}")
       comma_sep_jn_hosts = ",".join(params.journalnode_hosts)
       checkWebUICmd = format(
         "su - {smoke_test_user} -c 'python {checkWebUIFilePath} -m "

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py

@@ -34,12 +34,12 @@ def hcat_service_check():
     else:
       kinit_cmd = ""
 
-    File('/tmp/hcatSmoke.sh',
+    File(format("{tmp_dir}/hcatSmoke.sh"),
          content=StaticFile("hcatSmoke.sh"),
          mode=0755
     )
 
-    prepare_cmd = format("{kinit_cmd}sh /tmp/hcatSmoke.sh hcatsmoke{unique} prepare")
+    prepare_cmd = format("{kinit_cmd}sh {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} prepare")
 
     Execute(prepare_cmd,
             tries=3,
@@ -57,7 +57,7 @@ def hcat_service_check():
                   keytab=params.hdfs_user_keytab
     )
 
-    cleanup_cmd = format("{kinit_cmd}sh /tmp/hcatSmoke.sh hcatsmoke{unique} cleanup")
+    cleanup_cmd = format("{kinit_cmd}sh {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} cleanup")
 
     Execute(cleanup_cmd,
             tries=3,

+ 6 - 5
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py

@@ -23,6 +23,7 @@ import status_params
 
 # server configurations
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
 hive_server_conf_dir = "/etc/hive/conf.server"
@@ -57,8 +58,8 @@ hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port',
 hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
 
 smokeuser = config['configurations']['hadoop-env']['smokeuser']
-smoke_test_sql = "/tmp/hiveserver2.sql"
-smoke_test_path = "/tmp/hiveserver2Smoke.sh"
+smoke_test_sql = format("{tmp_dir}/hiveserver2.sql")
+smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh")
 smoke_user_keytab = config['configurations']['hadoop-env']['smokeuser_keytab']
 
 _authentication = config['configurations']['core-site']['hadoop.security.authentication']
@@ -97,8 +98,8 @@ target = format("{hive_lib}/{jdbc_jar_name}")
 jdk_location = config['hostLevelParams']['jdk_location']
 driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
 
-start_hiveserver2_path = "/tmp/start_hiveserver2_script"
-start_metastore_path = "/tmp/start_metastore_script"
+start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script")
+start_metastore_path = format("{tmp_dir}/start_metastore_script")
 
 hive_aux_jars_path = config['configurations']['hive-env']['hive_aux_jars_path']
 hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
@@ -113,7 +114,7 @@ mysql_user = "mysql"
 mysql_group = 'mysql'
 mysql_host = config['clusterHostInfo']['hive_mysql_host']
 
-mysql_adduser_path = "/tmp/addMysqlUser.sh"
+mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
 
 ########## HCAT
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py

@@ -23,6 +23,7 @@ import status_params
 
 # server configurations
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 oozie_user = config['configurations']['oozie-env']['oozie_user']

+ 4 - 4
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/service_check.py

@@ -35,17 +35,17 @@ def oozie_smoke_shell_file(
 ):
   import params
 
-  File( format("/tmp/{file_name}"),
+  File( format("{tmp_dir}/{file_name}"),
     content = StaticFile(file_name),
     mode = 0755
   )
   
   if params.security_enabled:
-    sh_cmd = format("sh /tmp/{file_name} {conf_dir} {hadoop_conf_dir} {smokeuser} {security_enabled} {smokeuser_keytab} {kinit_path_local}")
+    sh_cmd = format("sh {tmp_dir}/{file_name} {conf_dir} {hadoop_conf_dir} {smokeuser} {security_enabled} {smokeuser_keytab} {kinit_path_local}")
   else:
-    sh_cmd = format("sh /tmp/{file_name} {conf_dir} {hadoop_conf_dir} {smokeuser} {security_enabled}")
+    sh_cmd = format("sh {tmp_dir}/{file_name} {conf_dir} {hadoop_conf_dir} {smokeuser} {security_enabled}")
 
-  Execute( format("/tmp/{file_name}"),
+  Execute( format("{tmp_dir}/{file_name}"),
     command   = sh_cmd,
     tries     = 3,
     try_sleep = 5,

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/params.py

@@ -23,6 +23,7 @@ from resource_management import *
 
 # server configurations
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 pig_conf_dir = "/etc/pig/conf"
 hadoop_conf_dir = "/etc/hadoop/conf"

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/service_check.py

@@ -45,12 +45,12 @@ class PigServiceCheck(Script):
       kinit_path_local = params.kinit_path_local
     )
 
-    File( '/tmp/pigSmoke.sh',
+    File( format("{tmp_dir}/pigSmoke.sh"),
       content = StaticFile("pigSmoke.sh"),
       mode = 0755
     )
 
-    Execute( "pig /tmp/pigSmoke.sh",
+    Execute( format("pig {tmp_dir}/pigSmoke.sh"),
       tries     = 3,
       try_sleep = 5,
       path      = '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py

@@ -24,6 +24,7 @@ import status_params
 
 # server configurations
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 hcat_user = config['configurations']['hive-env']['hcat_user']
 webhcat_user = config['configurations']['hive-env']['webhcat_user']

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/service_check.py

@@ -25,12 +25,12 @@ class WebHCatServiceCheck(Script):
     import params
     env.set_params(params)
 
-    File('/tmp/templetonSmoke.sh',
+    File(format("{tmp_dir}/templetonSmoke.sh"),
          content= StaticFile('templetonSmoke.sh'),
          mode=0755
     )
 
-    cmd = format("sh /tmp/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
+    cmd = format("sh {tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
                  " {security_param} {kinit_path_local}",
                  smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/scripts/params.py

@@ -24,6 +24,7 @@ import status_params
 
 # server configurations
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 config_dir = "/etc/zookeeper/conf"
 zk_user =  config['configurations']['zookeeper-env']['zk_user']

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/scripts/service_check.py

@@ -26,12 +26,12 @@ class ZookeeperServiceCheck(Script):
     import params
     env.set_params(params)
 
-    File("/tmp/zkSmoke.sh",
+    File(format("{tmp_dir}/zkSmoke.sh"),
          mode=0755,
          content=StaticFile('zkSmoke.sh')
     )
 
-    cmd_qourum = format("sh /tmp/zkSmoke.sh {smoke_script} {smokeuser} {config_dir} {clientPort} "
+    cmd_qourum = format("sh {tmp_dir}/zkSmoke.sh {smoke_script} {smokeuser} {config_dir} {clientPort} "
                   "{security_enabled} {kinit_path_local} {smokeUserKeytab}",
                   smokeUserKeytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py

@@ -22,6 +22,7 @@ from resource_management.core.system import System
 import os
 
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 #users and groups
 yarn_user = config['configurations']['yarn-env']['yarn_user']

+ 4 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -152,10 +152,12 @@ def set_uid(user, user_dirs):
   """
   user_dirs - comma separated directories
   """
-  File("/tmp/changeUid.sh",
+  import params
+
+  File(format("{tmp_dir}/changeUid.sh"),
        content=StaticFile("changeToSecureUid.sh"),
        mode=0555)
-  Execute(format("/tmp/changeUid.sh {user} {user_dirs} 2>/dev/null"),
+  Execute(format("{tmp_dir}/changeUid.sh {user} {user_dirs} 2>/dev/null"),
           not_if = format("test $(id -u {user}) -gt 1000"))
   
 def setup_java():

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py

@@ -24,6 +24,7 @@ import status_params
 
 # server configurations
 config = Script.get_config()
+exec_tmp_dir = Script.get_tmp_dir()
 
 hbase_conf_dir = "/etc/hbase/conf"
 daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"

+ 4 - 4
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/service_check.py

@@ -30,9 +30,9 @@ class HbaseServiceCheck(Script):
     output_file = "/apps/hbase/data/ambarismoketest"
     test_cmd = format("fs -test -e {output_file}")
     smokeuser_kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smoke_test_user};") if params.security_enabled else ""
-    hbase_servicecheck_file = '/tmp/hbase-smoke.sh'
+    hbase_servicecheck_file = format("{exec_tmp_dir}/hbase-smoke.sh")
   
-    File( '/tmp/hbaseSmokeVerify.sh',
+    File( format("{exec_tmp_dir}/hbaseSmokeVerify.sh"),
       content = StaticFile("hbaseSmokeVerify.sh"),
       mode = 0755
     )
@@ -43,7 +43,7 @@ class HbaseServiceCheck(Script):
     )
     
     if params.security_enabled:    
-      hbase_grant_premissions_file = '/tmp/hbase_grant_permissions.sh'
+      hbase_grant_premissions_file = format("{exec_tmp_dir}/hbase_grant_permissions.sh")
       grantprivelegecmd = format("{kinit_cmd} hbase shell {hbase_grant_premissions_file}")
   
       File( hbase_grant_premissions_file,
@@ -58,7 +58,7 @@ class HbaseServiceCheck(Script):
       )
 
     servicecheckcmd = format("{smokeuser_kinit_cmd} hbase --config {hbase_conf_dir} shell {hbase_servicecheck_file}")
-    smokeverifycmd = format("{smokeuser_kinit_cmd} /tmp/hbaseSmokeVerify.sh {hbase_conf_dir} {service_check_data}")
+    smokeverifycmd = format("{smokeuser_kinit_cmd} {exec_tmp_dir}/hbaseSmokeVerify.sh {hbase_conf_dir} {service_check_data}")
   
     Execute( servicecheckcmd,
       tries     = 3,

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py

@@ -112,11 +112,11 @@ def format_namenode(force=None):
       ExecuteHadoop('namenode -format',
                     kinit_override=True)
     else:
-      File('/tmp/checkForFormat.sh',
+      File(format("{tmp_dir}/checkForFormat.sh"),
            content=StaticFile("checkForFormat.sh"),
            mode=0755)
       Execute(format(
-        "/tmp/checkForFormat.sh {hdfs_user} {hadoop_conf_dir} {old_mark_dir} "
+        "{tmp_dir}/checkForFormat.sh {hdfs_user} {hadoop_conf_dir} {old_mark_dir} "
         "{mark_dir} {dfs_name_dir}"),
               not_if=format("test -d {old_mark_dir} || test -d {mark_dir}"),
               path="/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin"

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py

@@ -22,6 +22,7 @@ import status_params
 import os
 
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 if System.get_instance().os_type == "oraclelinux":
   ulimit_cmd = ''

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py

@@ -26,7 +26,7 @@ class HdfsServiceCheck(Script):
 
     env.set_params(params)
     unique = functions.get_unique_id_and_date()
-    dir = '/tmp'
+    dir = params.tmp_dir
     tmp_file = format("{dir}/{unique}")
 
     safemode_command = "dfsadmin -safemode get | grep OFF"
@@ -76,7 +76,7 @@ class HdfsServiceCheck(Script):
       journalnode_port = params.journalnode_port
       smoke_test_user = params.smoke_user
       checkWebUIFileName = "checkWebUI.py"
-      checkWebUIFilePath = format("/tmp/{checkWebUIFileName}")
+      checkWebUIFilePath = format("{tmp_dir}/{checkWebUIFileName}")
       comma_sep_jn_hosts = ",".join(params.journalnode_hosts)
       checkWebUICmd = format(
         "su - {smoke_test_user} -c 'python {checkWebUIFilePath} -m "

+ 3 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py

@@ -33,12 +33,12 @@ def hcat_service_check():
     else:
       kinit_cmd = ""
 
-    File('/tmp/hcatSmoke.sh',
+    File(format("{tmp_dir}/hcatSmoke.sh"),
          content=StaticFile("hcatSmoke.sh"),
          mode=0755
     )
 
-    prepare_cmd = format("{kinit_cmd}env JAVA_HOME={java64_home} /tmp/hcatSmoke.sh hcatsmoke{unique} prepare")
+    prepare_cmd = format("{kinit_cmd}env JAVA_HOME={java64_home} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} prepare")
 
     Execute(prepare_cmd,
             tries=3,
@@ -56,7 +56,7 @@ def hcat_service_check():
                   keytab=params.hdfs_user_keytab
     )
 
-    cleanup_cmd = format("{kinit_cmd} /tmp/hcatSmoke.sh hcatsmoke{unique} cleanup")
+    cleanup_cmd = format("{kinit_cmd} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} cleanup")
 
     Execute(cleanup_cmd,
             tries=3,

+ 6 - 5
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py

@@ -23,6 +23,7 @@ import status_params
 
 # server configurations
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
 hive_server_conf_dir = "/etc/hive/conf.server"
@@ -60,8 +61,8 @@ hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port',
 hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
 
 smokeuser = config['configurations']['hadoop-env']['smokeuser']
-smoke_test_sql = "/tmp/hiveserver2.sql"
-smoke_test_path = "/tmp/hiveserver2Smoke.sh"
+smoke_test_sql = format("{tmp_dir}/hiveserver2.sql")
+smoke_test_path = format("{tmp_dir}/hiveserver2Smoke.sh")
 smoke_user_keytab = config['configurations']['hadoop-env']['smokeuser_keytab']
 
 _authentication = config['configurations']['core-site']['hadoop.security.authentication']
@@ -100,8 +101,8 @@ target = format("{hive_lib}/{jdbc_jar_name}")
 jdk_location = config['hostLevelParams']['jdk_location']
 driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
 
-start_hiveserver2_path = "/tmp/start_hiveserver2_script"
-start_metastore_path = "/tmp/start_metastore_script"
+start_hiveserver2_path = format("{tmp_dir}/start_hiveserver2_script")
+start_metastore_path = format("{tmp_dir}/start_metastore_script")
 
 hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
 hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
@@ -114,7 +115,7 @@ mysql_user = "mysql"
 mysql_group = 'mysql'
 mysql_host = config['clusterHostInfo']['hive_mysql_host']
 
-mysql_adduser_path = "/tmp/addMysqlUser.sh"
+mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
 
 ######## Metastore Schema
 if str(hdp_stack_version).startswith('2.1'):

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py

@@ -23,6 +23,7 @@ import status_params
 
 # server configurations
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 oozie_user = config['configurations']['oozie-env']['oozie_user']
 smokeuser = config['configurations']['hadoop-env']['smokeuser']

+ 4 - 4
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/service_check.py

@@ -35,7 +35,7 @@ def oozie_smoke_shell_file(
 ):
   import params
 
-  File( format("/tmp/{file_name}"),
+  File( format("{tmp_dir}/{file_name}"),
     content = StaticFile(file_name),
     mode = 0755
   )
@@ -43,11 +43,11 @@ def oozie_smoke_shell_file(
   os_family = System.get_instance().os_family
   
   if params.security_enabled:
-    sh_cmd = format("/tmp/{file_name} {os_family} {conf_dir} {hadoop_conf_dir} {smokeuser} {security_enabled} {smokeuser_keytab} {kinit_path_local}")
+    sh_cmd = format("{tmp_dir}/{file_name} {os_family} {conf_dir} {hadoop_conf_dir} {smokeuser} {security_enabled} {smokeuser_keytab} {kinit_path_local}")
   else:
-    sh_cmd = format("/tmp/{file_name} {os_family} {conf_dir} {hadoop_conf_dir} {smokeuser} {security_enabled}")
+    sh_cmd = format("{tmp_dir}/{file_name} {os_family} {conf_dir} {hadoop_conf_dir} {smokeuser} {security_enabled}")
 
-  Execute( format("/tmp/{file_name}"),
+  Execute( format("{tmp_dir}/{file_name}"),
     command   = sh_cmd,
     tries     = 3,
     try_sleep = 5,

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py

@@ -23,6 +23,7 @@ from resource_management import *
 
 # server configurations
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 pig_conf_dir = "/etc/pig/conf"
 hadoop_conf_dir = "/etc/hadoop/conf"

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/service_check.py

@@ -45,12 +45,12 @@ class PigServiceCheck(Script):
       kinit_path_local = params.kinit_path_local
     )
 
-    File( '/tmp/pigSmoke.sh',
+    File( format("{tmp_dir}/pigSmoke.sh"),
       content = StaticFile("pigSmoke.sh"),
       mode = 0755
     )
 
-    Execute( "pig /tmp/pigSmoke.sh",
+    Execute( format("pig {tmp_dir}/pigSmoke.sh"),
       tries     = 3,
       try_sleep = 5,
       path      = '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py

@@ -24,6 +24,7 @@ import status_params
 
 # server configurations
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 hcat_user = config['configurations']['hive-env']['hcat_user']
 webhcat_user = config['configurations']['hive-env']['webhcat_user']

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/service_check.py

@@ -26,12 +26,12 @@ class WebHCatServiceCheck(Script):
 
     env.set_params(params)
 
-    File('/tmp/templetonSmoke.sh',
+    File(format("{tmp_dir}/templetonSmoke.sh"),
          content= StaticFile('templetonSmoke.sh'),
          mode=0755
     )
 
-    cmd = format("/tmp/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
+    cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
                  " {security_param} {kinit_path_local}",
                  smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py

@@ -24,6 +24,7 @@ import status_params
 
 # server configurations
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 config_dir = "/etc/hadoop/conf"
 

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py

@@ -36,7 +36,7 @@ class ServiceCheck(Script):
       component_address = params.rm_webui_address
 
     validateStatusFileName = "validateYarnComponentStatus.py"
-    validateStatusFilePath = format("/tmp/{validateStatusFileName}")
+    validateStatusFilePath = format("{tmp_dir}/{validateStatusFileName}")
     python_executable = sys.executable
     validateStatusCmd = format("{python_executable} {validateStatusFilePath} {component_type} -p {component_address} -s {hadoop_ssl_enabled}")
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py

@@ -24,6 +24,7 @@ import status_params
 
 # server configurations
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 config_dir = "/etc/zookeeper/conf"
 zk_user =  config['configurations']['zookeeper-env']['zk_user']

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/service_check.py

@@ -26,12 +26,12 @@ class ZookeeperServiceCheck(Script):
     import params
     env.set_params(params)
 
-    File("/tmp/zkSmoke.sh",
+    File(format("{tmp_dir}/zkSmoke.sh"),
          mode=0755,
          content=StaticFile('zkSmoke.sh')
     )
 
-    cmd_qourum = format("/tmp/zkSmoke.sh {smoke_script} {smokeuser} {config_dir} {clientPort} "
+    cmd_qourum = format("{tmp_dir}/zkSmoke.sh {smoke_script} {smokeuser} {config_dir} {clientPort} "
                   "{security_enabled} {kinit_path_local} {smokeUserKeytab}",
                   smokeUserKeytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py

@@ -24,6 +24,7 @@ import status_params
 
 # server configurations
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 config_dir = "/etc/hadoop/conf"
 

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/service_check.py

@@ -36,7 +36,7 @@ class ServiceCheck(Script):
       component_address = params.rm_webui_address
 
     validateStatusFileName = "validateYarnComponentStatus.py"
-    validateStatusFilePath = format("/tmp/{validateStatusFileName}")
+    validateStatusFilePath = format("{tmp_dir}/{validateStatusFileName}")
     python_executable = sys.executable
     validateStatusCmd = format("{python_executable} {validateStatusFilePath} {component_type} -p {component_address} -s {hadoop_ssl_enabled}")
 

+ 5 - 4
ambari-server/src/test/python/stacks/utils/RMFTestCase.py

@@ -88,10 +88,11 @@ class RMFTestCase(TestCase):
     with Environment(basedir, test_mode=True) as RMFTestCase.env:
       with patch('resource_management.core.shell.checked_call', return_value=shell_mock_value): # we must always mock any shell calls
         with patch.object(Script, 'get_config', return_value=self.config_dict): # mocking configurations
-          with patch.object(Script, 'install_packages'):
-            with patch('resource_management.libraries.functions.get_kinit_path', return_value=kinit_path_local):
-              with patch.object(platform, 'linux_distribution', return_value=os_type):
-                method(RMFTestCase.env)
+          with patch.object(Script, 'get_tmp_dir', return_value="/tmp"):
+            with patch.object(Script, 'install_packages'):
+              with patch('resource_management.libraries.functions.get_kinit_path', return_value=kinit_path_local):
+                with patch.object(platform, 'linux_distribution', return_value=os_type):
+                  method(RMFTestCase.env)
     sys.path.remove(scriptsdir)
   
   def getConfig(self):
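
The extra nesting level pins Script.get_tmp_dir() to "/tmp" for all stack tests, so expectations recorded against the old hardcoded paths keep passing. Standalone, the equivalent patch is simply:

    from mock import patch

    with patch.object(Script, 'get_tmp_dir', return_value="/tmp"):
        method(RMFTestCase.env)  # run the script method under test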