AMBARI-4885. Refactor temporary shared resource for HDFS and MR initialization (Ivan Kozlov via aonishuk)

Andrew Onishuk, 11 years ago
Parent commit: b00d45e544
67 changed files with 1,435 additions and 720 deletions

Changed files (additions / deletions):
  1. 4 2
      ambari-agent/src/main/python/resource_management/libraries/script/hook.py
  2. 36 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py
  3. 61 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py
  4. 45 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py
  5. 0 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/templates/hadoop-env.sh.j2
  6. 1 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py
  7. 43 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
  8. 43 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
  9. 1 1
      ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py
  10. 2 1
      ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py
  11. 0 9
      ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
  12. 10 144
      ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
  13. 3 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py
  14. 63 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs.py
  15. 2 14
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py
  16. 2 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/namenode.py
  17. 1 3
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
  18. 2 1
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/snamenode.py
  19. 0 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/hdfs.conf.j2
  20. 0 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/slaves.j2
  21. 77 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py
  22. 6 2
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
  23. 0 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/templates/taskcontroller.cfg.j2
  24. 1 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py
  25. 12 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
  26. 43 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
  27. 1 1
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-RESTART/scripts/hook.py
  28. 0 1
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
  29. 0 12
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
  30. 6 139
      ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
  31. 3 1
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/datanode.py
  32. 63 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
  33. 3 14
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_client.py
  34. 3 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py
  35. 2 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
  36. 2 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
  37. 2 1
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py
  38. 2 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py
  39. 0 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/templates/hdfs.conf.j2
  40. 0 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/templates/slaves.j2
  41. 9 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
  42. 77 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
  43. 0 0
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/templates/taskcontroller.cfg.j2
  44. 35 3
      ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py
  45. 38 0
      ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py
  46. 35 0
      ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py
  47. 27 1
      ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_client.py
  48. 27 0
      ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_historyserver.py
  49. 63 2
      ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_jobtracker.py
  50. 28 3
      ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_tasktracker.py
  51. 48 0
      ambari-server/src/test/python/stacks/1.3.2/hooks/after-INSTALL/test_after_install.py
  52. 60 29
      ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
  53. 33 190
      ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py
  54. 34 2
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
  55. 32 1
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
  56. 34 2
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
  57. 33 1
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
  58. 34 2
      ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
  59. 0 0
      ambari-server/src/test/python/stacks/2.0.6/HIVE/_test_hive_service_check.py
  60. 39 0
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
  61. 39 0
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
  62. 39 1
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
  63. 40 1
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
  64. 55 0
      ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
  65. 13 0
      ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
  66. 2 136
      ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
  67. 16 0
      ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py

+ 4 - 2
ambari-agent/src/main/python/resource_management/libraries/script/hook.py

@@ -46,9 +46,11 @@ class Hook(Script):
     """
     args = sys.argv
     #Hook script to run
-    args[0] = args[0].replace(args[1], command)
+    args[0] = args[0].replace('before-'+args[1], command)
+    args[0] = args[0].replace('after-'+args[1], command)
     #Hook script base directory
-    args[3] = args[3].replace(args[1], command)
+    args[3] = args[3].replace('before-'+args[1], command)
+    args[3] = args[3].replace('after-'+args[1], command)
 
 
     cmd = [sys.executable]
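
For illustration, a minimal standalone sketch (hypothetical argv values, not taken from the commit) of what the prefixed substitution above accomplishes: because the full directory name, prefix included, is now replaced, a hook invoked as before-RESTART can re-dispatch to a hook with a different prefix, such as after-INSTALL:

    # Hypothetical argv for a hook run, mirroring the positions used above.
    command = "after-INSTALL"                       # hook to dispatch to
    args = [
        "/hooks/before-RESTART/scripts/hook.py",    # args[0]: hook script path
        "RESTART",                                  # args[1]: current command name
        "command.json",                             # args[2]: not touched here
        "/hooks/before-RESTART",                    # args[3]: hook base directory
    ]
    args[0] = args[0].replace('before-' + args[1], command)
    args[0] = args[0].replace('after-' + args[1], command)
    args[3] = args[3].replace('before-' + args[1], command)
    args[3] = args[3].replace('after-' + args[1], command)
    assert args[0] == "/hooks/after-INSTALL/scripts/hook.py"
    assert args[3] == "/hooks/after-INSTALL"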

+ 36 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py

@@ -0,0 +1,36 @@
+#!/usr/bin/env python2.6
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+from shared_initialization import *
+
+#Hook for hosts with only client without other components
+class AfterInstallHook(Hook):
+
+  def hook(self, env):
+    import params
+
+    env.set_params(params)
+    setup_hadoop_env()
+    setup_config()
+
+if __name__ == "__main__":
+  AfterInstallHook().execute()

+ 61 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py

@@ -0,0 +1,61 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from resource_management.core.system import System
+import os
+
+config = Script.get_config()
+
+#security params
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
+#java params
+java_home = config['hostLevelParams']['java_home']
+#hadoop params
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_log_dir_prefix = config['configurations']['global']['hdfs_log_dir_prefix']
+hadoop_pid_dir_prefix = config['configurations']['global']['hadoop_pid_dir_prefix']
+
+#hadoop-env.sh
+if System.get_instance().os_family == "suse":
+  jsvc_path = "/usr/lib/bigtop-utils"
+else:
+  jsvc_path = "/usr/libexec/bigtop-utils"
+hadoop_heapsize = config['configurations']['global']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['global']['namenode_heapsize']
+namenode_opt_newsize =  config['configurations']['global']['namenode_opt_newsize']
+namenode_opt_maxnewsize =  config['configurations']['global']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = default("jtnode_opt_newsize","200m")
+jtnode_opt_maxnewsize = default("jtnode_opt_maxnewsize","200m")
+jtnode_heapsize =  default("jtnode_heapsize","1024m")
+ttnode_heapsize = default("ttnode_heapsize","1024m")
+
+dtnode_heapsize = config['configurations']['global']['dtnode_heapsize']
+
+mapred_pid_dir_prefix = default("mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+mapred_log_dir_prefix = default("mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+
+
+#users and groups
+hdfs_user = config['configurations']['global']['hdfs_user']
+user_group = config['configurations']['global']['user_group']
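
Several values above fall back through the default() helper. As a point of reference, a minimal sketch of the lookup-with-fallback semantics it provides, using a hypothetical reimplementation and sample config (the real helper lives in resource_management and resolves '/'-separated paths against Script.get_config()):

    def default(path, default_value, config):
        # walk the '/'-separated path; fall back when any segment is missing
        node = config
        for key in path.strip('/').split('/'):
            if not isinstance(node, dict) or key not in node:
                return default_value
            node = node[key]
        return node

    conf = {"configurations": {"global": {"jtnode_heapsize": "2048m"}}}
    print(default("/configurations/global/jtnode_heapsize", "1024m", conf))    # 2048m
    print(default("/configurations/global/jtnode_opt_newsize", "200m", conf))  # 200m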

+ 45 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py

@@ -0,0 +1,45 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+import os
+from resource_management import *
+
+def setup_hadoop_env():
+  import params
+  if params.security_enabled:
+    tc_owner = "root"
+  else:
+    tc_owner = params.hdfs_user
+  Directory(params.hadoop_conf_dir,
+            recursive=True,
+            owner='root',
+            group='root'
+  )
+  File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
+       owner=tc_owner,
+       content=Template('hadoop-env.sh.j2')
+  )
+
+def setup_config():
+  import params
+  XmlConfig("core-site.xml",
+            conf_dir=params.hadoop_conf_dir,
+            configurations=params.config['configurations']['core-site'],
+            owner=params.hdfs_user,
+            group=params.user_group
+  )
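
setup_hadoop_env() pairs a recursive Directory resource with a templated File. A rough, self-contained approximation of the filesystem effect (hypothetical helpers; the real resources also manage owner/group and report change state):

    import os

    def directory(path, recursive=False):
        # approximate the Directory resource: ensure the path exists
        if recursive:
            if not os.path.isdir(path):
                os.makedirs(path)
        elif not os.path.isdir(path):
            os.mkdir(path)

    def file_resource(path, content):
        # approximate the File resource: write the rendered template content
        with open(path, "w") as f:
            f.write(content)

    directory("/tmp/demo-hadoop-conf", recursive=True)
    file_resource("/tmp/demo-hadoop-conf/hadoop-env.sh",
                  "# content rendered from hadoop-env.sh.j2\n")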

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hadoop-env.sh.j2 → ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/templates/hadoop-env.sh.j2


+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py

@@ -29,6 +29,7 @@ class BeforeConfigureHook(Hook):
     import params
 
     env.set_params(params)
+    setup_java()
     setup_users()
     install_packages()
 

+ 43 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py

@@ -23,6 +23,49 @@ import os
 
 config = Script.get_config()
 
+#java params
+artifact_dir = "/tmp/HDP-artifacts/"
+jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
+jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
+jce_location = config['hostLevelParams']['jdk_location']
+jdk_location = config['hostLevelParams']['jdk_location']
+java_home = config['hostLevelParams']['java_home']
+if System.get_instance().os_family == "suse":
+  jsvc_path = "/usr/lib/bigtop-utils"
+else:
+  jsvc_path = "/usr/libexec/bigtop-utils"
+#security params
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
+#hadoop params
+hadoop_conf_dir = "/etc/hadoop/conf"
+
+#hadoop-env.sh
+
+java_home = config['hostLevelParams']['java_home']
+if System.get_instance().os_family == "suse":
+  jsvc_path = "/usr/lib/bigtop-utils"
+else:
+  jsvc_path = "/usr/libexec/bigtop-utils"
+hadoop_heapsize = config['configurations']['global']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['global']['namenode_heapsize']
+namenode_opt_newsize =  config['configurations']['global']['namenode_opt_newsize']
+namenode_opt_maxnewsize =  config['configurations']['global']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = default("jtnode_opt_newsize","200m")
+jtnode_opt_maxnewsize = default("jtnode_opt_maxnewsize","200m")
+jtnode_heapsize =  default("jtnode_heapsize","1024m")
+ttnode_heapsize = "1024m"
+
+dtnode_heapsize = config['configurations']['global']['dtnode_heapsize']
+mapred_pid_dir_prefix = default("mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+mapred_log_dir_prefix = default("mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+
+hdfs_log_dir_prefix = config['configurations']['global']['hdfs_log_dir_prefix']
+hadoop_pid_dir_prefix = config['configurations']['global']['hadoop_pid_dir_prefix']
+
 #users and groups
 yarn_user = config['configurations']['global']['yarn_user']
 hbase_user = config['configurations']['global']['hbase_user']

+ 43 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -101,6 +101,49 @@ def set_uid(user, user_dirs):
   Execute(format("/tmp/changeUid.sh {user} {user_dirs} 2>/dev/null"),
           not_if = format("test $(id -u {user}) -gt 1000"))
 
+def setup_java():
+  """
+  Installs jdk using specific params, that comes from ambari-server
+  """
+  import params
+
+  jdk_curl_target = format("{artifact_dir}/{jdk_name}")
+  java_dir = os.path.dirname(params.java_home)
+  java_exec = format("{java_home}/bin/java")
+
+  if not params.jdk_name:
+    return
+
+  Execute(format("mkdir -p {artifact_dir} ; curl -kf --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
+          path = ["/bin","/usr/bin/"],
+          not_if = format("test -e {java_exec}"))
+
+  if params.jdk_name.endswith(".bin"):
+    install_cmd = format("mkdir -p {java_dir} ; chmod +x {jdk_curl_target}; cd {java_dir} ; echo A | {jdk_curl_target} -noregister > /dev/null 2>&1")
+  elif params.jdk_name.endswith(".gz"):
+    install_cmd = format("mkdir -p {java_dir} ; cd {java_dir} ; tar -xf {jdk_curl_target} > /dev/null 2>&1")
+
+  Execute(install_cmd,
+          path = ["/bin","/usr/bin/"],
+          not_if = format("test -e {java_exec}")
+  )
+  jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
+  download_jce = format("mkdir -p {artifact_dir}; curl -kf --retry 10 {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
+  Execute( download_jce,
+           path = ["/bin","/usr/bin/"],
+           not_if =format("test -e {jce_curl_target}"),
+           ignore_failures = True
+  )
+
+  if params.security_enabled:
+    security_dir = format("{java_home}/jre/lib/security")
+    extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}")
+    Execute(extract_cmd,
+            only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
+            cwd  = security_dir,
+            path = ['/bin/','/usr/bin']
+    )
+
 def install_packages():
   packages = {"redhat": ["net-snmp-utils", "net-snmp"],
               "suse": ["net-snmp"],

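setup_java() leans on not_if / only_if guards so that every Execute is safe to re-run. A hedged stand-in for that guard pattern (hypothetical execute() helper and JDK path, not Ambari's Execute resource):

    import subprocess

    def execute(command, not_if=None, ignore_failures=False):
        # skip the command when the not_if guard already succeeds,
        # mirroring how Execute(..., not_if=...) behaves above
        if not_if and subprocess.call(not_if, shell=True) == 0:
            return
        rc = subprocess.call(command, shell=True)
        if rc != 0 and not ignore_failures:
            raise RuntimeError("command failed: %s" % command)

    # e.g. only fetch the JDK when the java binary is not already present:
    execute("echo downloading jdk ...", not_if="test -e /usr/jdk64/jdk/bin/java")
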
+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py

@@ -24,7 +24,7 @@ from resource_management import *
 class BeforeConfigureHook(Hook):
 
   def hook(self, env):
-    self.run_custom_hook('START')
+    self.run_custom_hook('before-START')
 
 if __name__ == "__main__":
   BeforeConfigureHook().execute()

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py

@@ -29,8 +29,9 @@ class BeforeConfigureHook(Hook):
     import params
 
     env.set_params(params)
-    setup_java()
+    self.run_custom_hook('after-INSTALL')
     setup_hadoop()
+    setup_database()
     setup_configs()
     create_javahome_symlink()
     init_services()

+ 0 - 9
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py

@@ -23,23 +23,14 @@ import os
 
 config = Script.get_config()
 
-#java params
-artifact_dir = "/tmp/HDP-artifacts/"
-jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
-jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
-jce_location = config['hostLevelParams']['jdk_location']
-jdk_location = config['hostLevelParams']['jdk_location']
 #security params
 _authentication = config['configurations']['core-site']['hadoop.security.authentication']
 security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
 #users and groups
-mapred_user = config['configurations']['global']['mapred_user']
 hdfs_user = config['configurations']['global']['hdfs_user']
-yarn_user = config['configurations']['global']['yarn_user']
 
 user_group = config['configurations']['global']['user_group']
-mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
 
 #snmp
 snmp_conf_dir = "/etc/snmp/"

+ 10 - 144
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py

@@ -21,49 +21,6 @@ import os
 
 from resource_management import *
 
-def setup_java():
-  """
-  Installs jdk using specific params, that comes from ambari-server
-  """
-  import params
-
-  jdk_curl_target = format("{artifact_dir}/{jdk_name}")
-  java_dir = os.path.dirname(params.java_home)
-  java_exec = format("{java_home}/bin/java")
-  
-  if not params.jdk_name:
-    return
-  
-  Execute(format("mkdir -p {artifact_dir} ; curl -kf --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
-          path = ["/bin","/usr/bin/"],
-          not_if = format("test -e {java_exec}"))
-
-  if params.jdk_name.endswith(".bin"):
-    install_cmd = format("mkdir -p {java_dir} ; chmod +x {jdk_curl_target}; cd {java_dir} ; echo A | {jdk_curl_target} -noregister > /dev/null 2>&1")
-  elif params.jdk_name.endswith(".gz"):
-    install_cmd = format("mkdir -p {java_dir} ; cd {java_dir} ; tar -xf {jdk_curl_target} > /dev/null 2>&1")
-  
-  Execute(install_cmd,
-          path = ["/bin","/usr/bin/"],
-          not_if = format("test -e {java_exec}")
-  )
-  jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
-  download_jce = format("mkdir -p {artifact_dir}; curl -kf --retry 10 {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
-  Execute( download_jce,
-        path = ["/bin","/usr/bin/"],
-        not_if =format("test -e {jce_curl_target}"),
-        ignore_failures = True
-  )
-  
-  if params.security_enabled:
-    security_dir = format("{java_home}/jre/lib/security")
-    extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}")
-    Execute(extract_cmd,
-          only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
-          cwd  = security_dir,
-          path = ['/bin/','/usr/bin']
-    )
-
 def setup_hadoop():
   """
   Setup hadoop files and directories
@@ -77,11 +34,6 @@ def setup_hadoop():
   install_snappy()
 
   #directories
-  Directory(params.hadoop_conf_dir,
-            recursive=True,
-            owner='root',
-            group='root'
-  )
   Directory(params.hdfs_log_dir_prefix,
             recursive=True,
             owner='root',
@@ -94,40 +46,14 @@ def setup_hadoop():
   )
 
   #files
-  File(os.path.join(params.limits_conf_dir, 'hdfs.conf'),
-       owner='root',
-       group='root',
-       mode=0644,
-       content=Template("hdfs.conf.j2")
-  )
   if params.security_enabled:
-    File(os.path.join(params.hadoop_bin, "task-controller"),
-         owner="root",
-         group=params.mapred_tt_group,
-         mode=06050
-    )
-    tc_mode = 0644
     tc_owner = "root"
   else:
-    tc_mode = None
     tc_owner = params.hdfs_user
 
-  if tc_mode:
-    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
-         owner = tc_owner,
-         mode = tc_mode,
-         group = params.mapred_tt_group,
-         content=Template("taskcontroller.cfg.j2")
-    )
-  else:
-    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+    File(os.path.join(params.hadoop_conf_dir, 'commons-logging.properties'),
          owner=tc_owner,
-         content=Template("taskcontroller.cfg.j2")
-    )
-  for file in ['hadoop-env.sh', 'commons-logging.properties', 'slaves']:
-    File(os.path.join(params.hadoop_conf_dir, file),
-         owner=tc_owner,
-         content=Template(file + ".j2")
+         content=Template("commons-logging.properties.j2")
     )
 
   health_check_template = "health_check" #for stack 1 use 'health_check'
@@ -156,6 +82,11 @@ def setup_hadoop():
        content=Template("hadoop-metrics2.properties.j2")
   )
 
+def setup_database():
+  """
+  Load DB
+  """
+  import params
   db_driver_dload_cmd = ""
   if params.server_db_name == 'oracle' and params.oracle_driver_url != "":
     db_driver_dload_cmd = format(
@@ -176,66 +107,17 @@ def setup_configs():
   """
   import params
 
-  if "mapred-queue-acls" in params.config['configurations']:
-    XmlConfig("mapred-queue-acls.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations'][
-                'mapred-queue-acls'],
-              owner=params.mapred_user,
-              group=params.user_group
-    )
-  elif os.path.exists(
-      os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml")):
-    File(os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml"),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
-
-  if "hadoop-policy" in params.config['configurations']:
-    XmlConfig("hadoop-policy.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['hadoop-policy'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-
   XmlConfig("core-site.xml",
             conf_dir=params.hadoop_conf_dir,
             configurations=params.config['configurations']['core-site'],
             owner=params.hdfs_user,
             group=params.user_group
   )
-
-  if "mapred-site" in params.config['configurations']:
-    XmlConfig("mapred-site.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['mapred-site'],
-              owner=params.mapred_user,
-              group=params.user_group
-    )
-
   File(params.task_log4j_properties_location,
        content=StaticFile("task-log4j.properties"),
        mode=0755
   )
 
-  if "capacity-scheduler" in params.config['configurations']:
-    XmlConfig("capacity-scheduler.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations'][
-                'capacity-scheduler'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-
-  XmlConfig("hdfs-site.xml",
-            conf_dir=params.hadoop_conf_dir,
-            configurations=params.config['configurations']['hdfs-site'],
-            owner=params.hdfs_user,
-            group=params.user_group
-  )
-
-  # if params.stack_version[0] == "1":
   Link('/usr/lib/hadoop/lib/hadoop-tools.jar',
        to = '/usr/lib/hadoop/hadoop-tools.jar'
   )
@@ -245,26 +127,10 @@ def setup_configs():
          owner=params.hdfs_user,
          group=params.user_group
     )
-  if os.path.exists(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml')):
-    File(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml'),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
+
   if os.path.exists(os.path.join(params.hadoop_conf_dir, 'masters')):
     File(os.path.join(params.hadoop_conf_dir, 'masters'),
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-  if os.path.exists(
-      os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example')):
-    File(os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example'),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
-  if os.path.exists(
-      os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example')):
-    File(os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example'),
-         owner=params.mapred_user,
+         owner=params.hdfs_user,
          group=params.user_group
     )
 
@@ -312,4 +178,4 @@ def init_services():
   # enable snmpd
   Execute( "service snmpd start; chkconfig snmpd on",
     path = "/usr/local/bin/:/bin/:/sbin/"
-  )  
+  )  
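
Much of this refactor is XmlConfig resources migrating from the before-START hook into service scripts. For orientation, a simplified sketch of what an XmlConfig declaration serializes (hypothetical helper; the real resource also applies conf_dir, owner, and group):

    def to_hadoop_xml(configurations):
        # render a dict of properties in hadoop's *-site.xml shape
        lines = ['<?xml version="1.0"?>', '<configuration>']
        for name, value in sorted(configurations.items()):
            lines += ['  <property>',
                      '    <name>%s</name>' % name,
                      '    <value>%s</value>' % value,
                      '  </property>']
        lines.append('</configuration>')
        return '\n'.join(lines)

    print(to_hadoop_xml({'hadoop.security.authentication': 'simple'}))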

+ 3 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py

@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from hdfs_datanode import datanode
+from hdfs import hdfs
 
 
 class DataNode(Script):
@@ -44,6 +45,8 @@ class DataNode(Script):
   def configure(self, env):
     import params
 
+    env.set_params(params)
+    hdfs()
     datanode(action="configure")
 
   def status(self, env):

+ 63 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs.py

@@ -0,0 +1,63 @@
+#!/usr/bin/env python2.6
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+import sys
+import os
+
+
+def hdfs(name=None):
+  import params
+
+  File(os.path.join(params.limits_conf_dir, 'hdfs.conf'),
+       owner='root',
+       group='root',
+       mode=0644,
+       content=Template("hdfs.conf.j2")
+  )
+
+  if params.security_enabled:
+    tc_mode = 0644
+    tc_owner = "root"
+  else:
+    tc_mode = None
+    tc_owner = params.hdfs_user
+
+  if "hadoop-policy" in params.config['configurations']:
+    XmlConfig("hadoop-policy.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations']['hadoop-policy'],
+              owner=params.hdfs_user,
+              group=params.user_group
+    )
+
+  XmlConfig("hdfs-site.xml",
+            conf_dir=params.hadoop_conf_dir,
+            configurations=params.config['configurations']['hdfs-site'],
+            owner=params.hdfs_user,
+            group=params.user_group
+  )
+
+  File(os.path.join(params.hadoop_conf_dir, 'slaves'),
+       owner=tc_owner,
+       content=Template("slaves.j2")
+  )
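
The slaves file above is produced from a Jinja2 template. A hedged standalone rendering example (the template body shown is hypothetical; the actual slaves.j2 content is not part of this diff):

    import jinja2

    slaves_j2 = "{% for host in slave_hosts %}{{ host }}\n{% endfor %}"
    rendered = jinja2.Template(slaves_j2).render(slave_hosts=["host1", "host2"])
    print(rendered)  # one datanode hostname per line, as conf/slaves expects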

+ 2 - 14
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py

@@ -18,6 +18,7 @@ limitations under the License.
 """
 
 from resource_management import *
+from hdfs import hdfs
 from utils import service
 
 
@@ -44,20 +45,7 @@ class HdfsClient(Script):
 
   def configure(self, env):
     import params
-
-    XmlConfig("core-site.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['core-site'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-    
-    XmlConfig("hdfs-site.xml",
-            conf_dir=params.hadoop_conf_dir,
-            configurations=params.config['configurations']['hdfs-site'],
-            owner=params.hdfs_user,
-            group=params.user_group
-    )
+    hdfs()
 
 
 if __name__ == "__main__":

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/namenode.py

@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from hdfs_namenode import namenode
+from hdfs import hdfs
 
 
 class NameNode(Script):
@@ -45,6 +46,7 @@ class NameNode(Script):
     import params
 
     env.set_params(params)
+    hdfs()
     namenode(action="configure")
     pass
 

+ 1 - 3
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py

@@ -143,6 +143,4 @@ HdfsDirectory = functools.partial(
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local
 )
-
-
-
+limits_conf_dir = "/etc/security/limits.d"

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/snamenode.py

@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from hdfs_snamenode import snamenode
+from hdfs import hdfs
 
 
 class SNameNode(Script):
@@ -49,7 +50,7 @@ class SNameNode(Script):
     import params
 
     env.set_params(params)
-
+    hdfs()
     snamenode(action="configure")
 
   def status(self, env):

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hdfs.conf.j2 → ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/hdfs.conf.j2


+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/slaves.j2 → ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/slaves.j2


+ 77 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py

@@ -97,3 +97,80 @@ def mapreduce(name=None):
             owner=params.mapred_user,
             group=params.user_group,
   )
+
+  if params.security_enabled:
+    tc_mode = 0644
+    tc_owner = "root"
+  else:
+    tc_mode = None
+    tc_owner = params.hdfs_user
+
+  if params.security_enabled:
+    File(os.path.join(params.hadoop_bin, "task-controller"),
+         owner="root",
+         group=params.mapred_tt_group,
+         mode=06050
+    )
+    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+         owner = tc_owner,
+         mode = tc_mode,
+         group = params.mapred_tt_group,
+         content=Template("taskcontroller.cfg.j2")
+    )
+  else:
+    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+         owner=tc_owner,
+         content=Template("taskcontroller.cfg.j2")
+    )
+
+  if "capacity-scheduler" in params.config['configurations']:
+    XmlConfig("capacity-scheduler.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations'][
+                'capacity-scheduler'],
+              owner=params.hdfs_user,
+              group=params.user_group
+    )
+
+  if "mapred-queue-acls" in params.config['configurations']:
+    XmlConfig("mapred-queue-acls.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations'][
+                'mapred-queue-acls'],
+              owner=params.mapred_user,
+              group=params.user_group
+    )
+  elif os.path.exists(
+    os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml")):
+    File(os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml"),
+         owner=params.mapred_user,
+         group=params.user_group
+    )
+
+  if "mapred-site" in params.config['configurations']:
+    XmlConfig("mapred-site.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations']['mapred-site'],
+              owner=params.mapred_user,
+              group=params.user_group
+    )
+
+  if os.path.exists(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml')):
+    File(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml'),
+         owner=params.mapred_user,
+         group=params.user_group
+    )
+
+  if os.path.exists(
+    os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example')):
+    File(os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example'),
+         owner=params.mapred_user,
+         group=params.user_group
+    )
+
+  if os.path.exists(
+    os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example')):
+    File(os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example'),
+         owner=params.mapred_user,
+         group=params.user_group
+    )
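
The 06050 mode on task-controller packs special bits into the leading octal digit: 4 is setuid, 2 is setgid, and the remaining 050 grants the owning group read and execute only. A quick standalone check of that decoding (stat.filemode needs Python 3.3+):

    import stat

    mode = 0o6050  # written as 06050 in the Python 2 source above
    print(bool(mode & stat.S_ISUID), bool(mode & stat.S_ISGID))  # True True
    print(stat.filemode(stat.S_IFREG | mode))  # ---Sr-s---  (group r-x plus suid/sgid)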

+ 6 - 2
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py

@@ -61,9 +61,9 @@ mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapred.
 #for create_hdfs_directory
 hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_pid_dir_prefix = config['configurations']['global']['hadoop_pid_dir_prefix']
 hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
 hdfs_user = config['configurations']['global']['hdfs_user']
-kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code
@@ -74,4 +74,8 @@ HdfsDirectory = functools.partial(
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local
-)
+)
+
+mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
+
+slave_hosts = default("/clusterHostInfo/slave_hosts", [])

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/taskcontroller.cfg.j2 → ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/templates/taskcontroller.cfg.j2


+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py

@@ -29,6 +29,7 @@ class BeforeConfigureHook(Hook):
     import params
 
     env.set_params(params)
+    setup_java()
     setup_users()
     install_packages()
 

+ 12 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py

@@ -87,3 +87,15 @@ if has_ganglia_server:
   ganglia_server_host = ganglia_server_hosts[0]
 
 hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
+
+#security params
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
+
+#java params
+java_home = config['hostLevelParams']['java_home']
+artifact_dir = "/tmp/HDP-artifacts/"
+jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
+jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
+jce_location = config['hostLevelParams']['jdk_location']
+jdk_location = config['hostLevelParams']['jdk_location']

+ 43 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -119,6 +119,49 @@ def set_uid(user, user_dirs):
   Execute(format("/tmp/changeUid.sh {user} {user_dirs} 2>/dev/null"),
           not_if = format("test $(id -u {user}) -gt 1000"))
 
+def setup_java():
+  """
+  Installs jdk using specific params, that comes from ambari-server
+  """
+  import params
+
+  jdk_curl_target = format("{artifact_dir}/{jdk_name}")
+  java_dir = os.path.dirname(params.java_home)
+  java_exec = format("{java_home}/bin/java")
+
+  if not params.jdk_name:
+    return
+
+  Execute(format("mkdir -p {artifact_dir} ; curl -kf --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
+          path = ["/bin","/usr/bin/"],
+          not_if = format("test -e {java_exec}"))
+
+  if params.jdk_name.endswith(".bin"):
+    install_cmd = format("mkdir -p {java_dir} ; chmod +x {jdk_curl_target}; cd {java_dir} ; echo A | {jdk_curl_target} -noregister > /dev/null 2>&1")
+  elif params.jdk_name.endswith(".gz"):
+    install_cmd = format("mkdir -p {java_dir} ; cd {java_dir} ; tar -xf {jdk_curl_target} > /dev/null 2>&1")
+
+  Execute(install_cmd,
+          path = ["/bin","/usr/bin/"],
+          not_if = format("test -e {java_exec}")
+  )
+  jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
+  download_jce = format("mkdir -p {artifact_dir}; curl -kf --retry 10 {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
+  Execute( download_jce,
+           path = ["/bin","/usr/bin/"],
+           not_if =format("test -e {jce_curl_target}"),
+           ignore_failures = True
+  )
+
+  if params.security_enabled:
+    security_dir = format("{java_home}/jre/lib/security")
+    extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}")
+    Execute(extract_cmd,
+            only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
+            cwd  = security_dir,
+            path = ['/bin/','/usr/bin']
+    )
+
 def install_packages():
   packages = {"redhat": ["net-snmp-utils", "net-snmp"],
               "suse": ["net-snmp"],

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-RESTART/scripts/hook.py

@@ -24,7 +24,7 @@ from resource_management import *
 class BeforeConfigureHook(Hook):
 
   def hook(self, env):
-    self.run_custom_hook('START')
+    self.run_custom_hook('before-START')
 
 if __name__ == "__main__":
   BeforeConfigureHook().execute()

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py

@@ -29,7 +29,6 @@ class BeforeConfigureHook(Hook):
     import params
 
     env.set_params(params)
-    setup_java()
     setup_hadoop()
     setup_configs()
     create_javahome_symlink()

+ 0 - 12
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py

@@ -23,12 +23,6 @@ import os
 
 config = Script.get_config()
 
-#java params
-artifact_dir = "/tmp/HDP-artifacts/"
-jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
-jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
-jce_location = config['hostLevelParams']['jdk_location']
-jdk_location = config['hostLevelParams']['jdk_location']
 #security params
 _authentication = config['configurations']['core-site']['hadoop.security.authentication']
 security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
@@ -39,7 +33,6 @@ hdfs_user = config['configurations']['global']['hdfs_user']
 yarn_user = config['configurations']['global']['yarn_user']
 
 user_group = config['configurations']['global']['user_group']
-mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
 
 #snmp
 snmp_conf_dir = "/etc/snmp/"
@@ -88,7 +81,6 @@ hadoop_home = "/usr"
 hadoop_bin = "/usr/lib/hadoop/sbin"
 
 task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
-limits_conf_dir = "/etc/security/limits.d"
 
 hdfs_log_dir_prefix = config['configurations']['global']['hdfs_log_dir_prefix']
 hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
@@ -138,10 +130,6 @@ mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 mapred_log_dir_prefix = default("mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
 
-#taskcontroller.cfg
-
-mapred_local_dir = "/tmp/hadoop-mapred/mapred/local"
-
 #log4j.properties
 
 yarn_log_dir_prefix = default("yarn_log_dir_prefix","/var/log/hadoop-yarn")

+ 6 - 139
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py

@@ -21,49 +21,6 @@ import os
 
 from resource_management import *
 
-def setup_java():
-  """
-  Installs jdk using specific params, that comes from ambari-server
-  """
-  import params
-
-  jdk_curl_target = format("{artifact_dir}/{jdk_name}")
-  java_dir = os.path.dirname(params.java_home)
-  java_exec = format("{java_home}/bin/java")
-  
-  if not params.jdk_name:
-    return
-  
-  Execute(format("mkdir -p {artifact_dir} ; curl -kf --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
-          path = ["/bin","/usr/bin/"],
-          not_if = format("test -e {java_exec}"))
-
-  if params.jdk_name.endswith(".bin"):
-    install_cmd = format("mkdir -p {java_dir} ; chmod +x {jdk_curl_target}; cd {java_dir} ; echo A | {jdk_curl_target} -noregister > /dev/null 2>&1")
-  elif params.jdk_name.endswith(".gz"):
-    install_cmd = format("mkdir -p {java_dir} ; cd {java_dir} ; tar -xf {jdk_curl_target} > /dev/null 2>&1")
-  
-  Execute(install_cmd,
-          path = ["/bin","/usr/bin/"],
-          not_if = format("test -e {java_exec}")
-  )
-  jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
-  download_jce = format("mkdir -p {artifact_dir}; curl -kf --retry 10 {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
-  Execute( download_jce,
-        path = ["/bin","/usr/bin/"],
-        not_if =format("test -e {jce_curl_target}"),
-        ignore_failures = True
-  )
-  
-  if params.security_enabled:
-    security_dir = format("{java_home}/jre/lib/security")
-    extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}")
-    Execute(extract_cmd,
-          only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
-          cwd  = security_dir,
-          path = ['/bin/','/usr/bin']
-    )
-
 def setup_hadoop():
   """
   Setup hadoop files and directories
@@ -98,37 +55,12 @@ def setup_hadoop():
             owner=params.hdfs_user,
             )
   #files
-  File(os.path.join(params.limits_conf_dir, 'hdfs.conf'),
-       owner='root',
-       group='root',
-       mode=0644,
-       content=Template("hdfs.conf.j2")
-  )
   if params.security_enabled:
-    File(os.path.join(params.hadoop_bin, "task-controller"),
-         owner="root",
-         group=params.mapred_tt_group,
-         mode=06050
-    )
-    tc_mode = 0644
     tc_owner = "root"
   else:
-    tc_mode = None
     tc_owner = params.hdfs_user
 
-  if tc_mode:
-    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
-         owner = tc_owner,
-         mode = tc_mode,
-         group = params.mapred_tt_group,
-         content=Template("taskcontroller.cfg.j2")
-    )
-  else:
-    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
-         owner=tc_owner,
-         content=Template("taskcontroller.cfg.j2")
-    )
-  for file in ['hadoop-env.sh', 'commons-logging.properties', 'slaves']:
+  for file in ['hadoop-env.sh', 'commons-logging.properties']:
     File(os.path.join(params.hadoop_conf_dir, file),
          owner=tc_owner,
          content=Template(file + ".j2")
@@ -160,6 +92,11 @@ def setup_hadoop():
        content=Template("hadoop-metrics2.properties.j2")
   )
 
+def setup_database():
+  """
+  Load DB
+  """
+  import params
   db_driver_dload_cmd = ""
   if params.server_db_name == 'oracle' and params.oracle_driver_url != "":
     db_driver_dload_cmd = format(
@@ -180,29 +117,6 @@ def setup_configs():
   """
   import params
 
-  if "mapred-queue-acls" in params.config['configurations']:
-    XmlConfig("mapred-queue-acls.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations'][
-                'mapred-queue-acls'],
-              owner=params.mapred_user,
-              group=params.user_group
-    )
-  elif os.path.exists(
-      os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml")):
-    File(os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml"),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
-
-  if "hadoop-policy" in params.config['configurations']:
-    XmlConfig("hadoop-policy.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['hadoop-policy'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-
   XmlConfig("core-site.xml",
             conf_dir=params.hadoop_conf_dir,
             configurations=params.config['configurations']['core-site'],
@@ -210,68 +124,21 @@ def setup_configs():
             group=params.user_group
   )
 
-  if "mapred-site" in params.config['configurations']:
-    XmlConfig("mapred-site.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['mapred-site'],
-              owner=params.mapred_user,
-              group=params.user_group
-    )
-
   File(params.task_log4j_properties_location,
        content=StaticFile("task-log4j.properties"),
        mode=0755
   )
 
-  if "capacity-scheduler" in params.config['configurations']:
-    XmlConfig("capacity-scheduler.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations'][
-                'capacity-scheduler'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-
-  XmlConfig("hdfs-site.xml",
-            conf_dir=params.hadoop_conf_dir,
-            configurations=params.config['configurations']['hdfs-site'],
-            owner=params.hdfs_user,
-            group=params.user_group
-  )
-
-  # if params.stack_version[0] == "1":
-  #   Link('/usr/lib/hadoop/hadoop-tools.jar',
-  #         to = '/usr/lib/hadoop/lib/hadoop-tools.jar',
-  #         mode = 0755
-  #   )
-
   if os.path.exists(os.path.join(params.hadoop_conf_dir, 'configuration.xsl')):
     File(os.path.join(params.hadoop_conf_dir, 'configuration.xsl'),
          owner=params.hdfs_user,
          group=params.user_group
     )
-  if os.path.exists(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml')):
-    File(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml'),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
   if os.path.exists(os.path.join(params.hadoop_conf_dir, 'masters')):
     File(os.path.join(params.hadoop_conf_dir, 'masters'),
               owner=params.hdfs_user,
               group=params.user_group
     )
-  if os.path.exists(
-      os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example')):
-    File(os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example'),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
-  if os.path.exists(
-      os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example')):
-    File(os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example'),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
 
   generate_include_file()
 

+ 3 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/datanode.py

@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from hdfs_datanode import datanode
+from hdfs import hdfs
 
 
 class DataNode(Script):
@@ -43,7 +44,8 @@ class DataNode(Script):
 
   def configure(self, env):
     import params
-
+    env.set_params(params)
+    hdfs()
     datanode(action="configure")
 
   def status(self, env):

+ 63 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py

@@ -0,0 +1,63 @@
+#!/usr/bin/env python2.6
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+import sys
+import os
+
+
+def hdfs(name=None):
+  import params
+
+  File(os.path.join(params.limits_conf_dir, 'hdfs.conf'),
+       owner='root',
+       group='root',
+       mode=0644,
+       content=Template("hdfs.conf.j2")
+  )
+
+  if params.security_enabled:
+    tc_mode = 0644
+    tc_owner = "root"
+  else:
+    tc_mode = None
+    tc_owner = params.hdfs_user
+
+  if "hadoop-policy" in params.config['configurations']:
+    XmlConfig("hadoop-policy.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations']['hadoop-policy'],
+              owner=params.hdfs_user,
+              group=params.user_group
+    )
+
+  XmlConfig("hdfs-site.xml",
+            conf_dir=params.hadoop_conf_dir,
+            configurations=params.config['configurations']['hdfs-site'],
+            owner=params.hdfs_user,
+            group=params.user_group
+  )
+
+  File(os.path.join(params.hadoop_conf_dir, 'slaves'),
+       owner=tc_owner,
+       content=Template("slaves.j2")
+  )

+ 3 - 14
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_client.py

@@ -18,6 +18,7 @@ limitations under the License.
 """
 
 from resource_management import *
+from hdfs import hdfs
 from utils import service
 
 
@@ -44,20 +45,8 @@ class HdfsClient(Script):
 
   def config(self, env):
     import params
-
-    XmlConfig("core-site.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['core-site'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-    
-    XmlConfig("hdfs-site.xml",
-            conf_dir=params.hadoop_conf_dir,
-            configurations=params.config['configurations']['hdfs-site'],
-            owner=params.hdfs_user,
-            group=params.user_group
-    )
+    hdfs()
+    pass
 
 
 if __name__ == "__main__":

+ 3 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py

@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from utils import service
+from hdfs import hdfs
 
 
 class JournalNode(Script):
@@ -57,6 +58,8 @@ class JournalNode(Script):
               owner=params.hdfs_user,
               group=params.user_group
     )
+    env.set_params(params)
+    hdfs()
     pass
 
   def status(self, env):

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py

@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from hdfs_namenode import namenode
+from hdfs import hdfs
 
 
 class NameNode(Script):
@@ -47,6 +48,7 @@ class NameNode(Script):
     import params
 
     env.set_params(params)
+    hdfs()
     namenode(action="configure")
     pass
 

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py

@@ -160,3 +160,5 @@ HdfsDirectory = functools.partial(
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local
 )
+
+limits_conf_dir = "/etc/security/limits.d"

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py

@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from hdfs_snamenode import snamenode
+from hdfs import hdfs
 
 
 class SNameNode(Script):
@@ -49,7 +50,7 @@ class SNameNode(Script):
     import params
 
     env.set_params(params)
-
+    hdfs()
     snamenode(action="configure")
 
   def status(self, env):

+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py

@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from utils import service
+from hdfs import hdfs
 
 
 class ZkfcSlave(Script):
@@ -48,6 +49,7 @@ class ZkfcSlave(Script):
     )
 
   def configure(self, env):
+    hdfs()
     pass
 
   def status(self, env):

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hdfs.conf.j2 → ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/templates/hdfs.conf.j2


+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/slaves.j2 → ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/templates/slaves.j2


+ 9 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py

@@ -124,3 +124,12 @@ HdfsDirectory = functools.partial(
   kinit_path_local = kinit_path_local
 )
 update_exclude_file_only = config['commandParams']['update_exclude_file_only']
+
+hadoop_bin = "/usr/lib/hadoop/sbin"
+mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
+
+#taskcontroller.cfg
+
+mapred_local_dir = "/tmp/hadoop-mapred/mapred/local"
+hdfs_log_dir_prefix = config['configurations']['global']['hdfs_log_dir_prefix']
+

+ 77 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py

@@ -22,6 +22,7 @@ Ambari Agent
 
 from resource_management import *
 import sys
+import os
 
 
 def yarn(name = None):
@@ -151,3 +152,79 @@ def yarn(name = None):
     )
 
 
+  if params.security_enabled:
+    tc_mode = 0644
+    tc_owner = "root"
+  else:
+    tc_mode = None
+    tc_owner = params.hdfs_user
+
+  if params.security_enabled:
+    File(os.path.join(params.hadoop_bin, "task-controller"),
+         owner="root",
+         group=params.mapred_tt_group,
+         mode=06050
+    )
+    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+         owner = tc_owner,
+         mode = tc_mode,
+         group = params.mapred_tt_group,
+         content=Template("taskcontroller.cfg.j2")
+    )
+  else:
+    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+         owner=tc_owner,
+         content=Template("taskcontroller.cfg.j2")
+    )
+
+  if "mapred-site" in params.config['configurations']:
+    XmlConfig("mapred-site.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations']['mapred-site'],
+              owner=params.mapred_user,
+              group=params.user_group
+    )
+
+  if "mapred-queue-acls" in params.config['configurations']:
+    XmlConfig("mapred-queue-acls.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations'][
+                'mapred-queue-acls'],
+              owner=params.mapred_user,
+              group=params.user_group
+    )
+  elif os.path.exists(
+    os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml")):
+    File(os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml"),
+         owner=params.mapred_user,
+         group=params.user_group
+    )
+
+  if "capacity-scheduler" in params.config['configurations']:
+    XmlConfig("capacity-scheduler.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations'][
+                'capacity-scheduler'],
+              owner=params.hdfs_user,
+              group=params.user_group
+    )
+
+  if os.path.exists(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml')):
+    File(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml'),
+         owner=params.mapred_user,
+         group=params.user_group
+    )
+
+  if os.path.exists(
+    os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example')):
+    File(os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example'),
+         owner=params.mapred_user,
+         group=params.user_group
+    )
+
+  if os.path.exists(
+    os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example')):
+    File(os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example'),
+         owner=params.mapred_user,
+         group=params.user_group
+    )
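
On a secured cluster the task-controller binary above is installed root-owned with mode 06050: setuid and setgid plus read/execute for mapred_tt_group only, so the daemon user (a member of that group) can invoke it while the binary escalates to root to launch tasks as the submitting user. The octal decomposes with the stdlib stat constants:

    # 06050 = setuid + setgid + r-x for the owning group, no permissions
    # for owner or other; verified with the standard stat constants.
    import stat

    mode = stat.S_ISUID | stat.S_ISGID | stat.S_IRGRP | stat.S_IXGRP
    assert mode == 06050  # Python 2 octal literal, as in the scripts
    print oct(mode)       # -> '06050'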

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/taskcontroller.cfg.j2 → ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/templates/taskcontroller.cfg.j2


+ 35 - 3
ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py

@@ -105,6 +105,7 @@ class TestDatanode(RMFTestCase):
                               not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               user = 'root',
                               )
+    self.assertNoMoreResources()
 
   def test_stop_secured(self):
     self.executeScript("1.3.2/services/HDFS/package/scripts/datanode.py",
@@ -129,8 +130,23 @@ class TestDatanode(RMFTestCase):
                               ignore_failures = True,
                               )
     self.assertNoMoreResources()
-
   def assert_configure_default(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -138,19 +154,35 @@ class TestDatanode(RMFTestCase):
                               recursive = True,
                               )
     self.assertResourceCalled('Directory', '/hadoop/hdfs',
+                              ignore_failures = True,
                               mode = 0755,
                               recursive = True,
-                              ignore_failures=True,
                               )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/data',
                               owner = 'hdfs',
+                              ignore_failures = True,
                               group = 'hadoop',
                               mode = 0750,
                               recursive = False,
-                              ignore_failures=True,
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',
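
These assertions are order-sensitive: in the RMFTestCase harness each assertResourceCalled() consumes the next resource the script declared, and assertNoMoreResources() fails if anything is left unconsumed. That is why the hdfs() resources (limits file, hdfs-site.xml, slaves) must be asserted before the Directory resources, and why this commit adds assertNoMoreResources() to tests that previously stopped early. A simplified model of that bookkeeping, assumed from how the tests read rather than copied from the harness:

    # Simplified model of the resource log the assertions above consume;
    # not the actual RMFTestCase implementation.
    class ResourceLog(object):
      def __init__(self):
        self.calls = []  # (resource_type, name, kwargs), in declaration order

      def record(self, rtype, name, **kwargs):
        self.calls.append((rtype, name, kwargs))

      def assertResourceCalled(self, rtype, name, **kwargs):
        expected = (rtype, name, kwargs)
        actual = self.calls.pop(0)
        assert actual == expected, "expected %r, got %r" % (expected, actual)

      def assertNoMoreResources(self):
        assert not self.calls, "unasserted resources: %r" % self.calls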

+ 38 - 0
ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py

@@ -203,6 +203,25 @@ class TestNamenode(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
+
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -211,6 +230,25 @@ class TestNamenode(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
+
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',

+ 35 - 0
ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py

@@ -75,6 +75,7 @@ class TestSNamenode(RMFTestCase):
                               action = ['delete'],
                               ignore_failures = True,
                               )
+    self.assertNoMoreResources()
 
   def test_configure_secured(self):
     self.executeScript("1.3.2/services/HDFS/package/scripts/snamenode.py",
@@ -104,6 +105,7 @@ class TestSNamenode(RMFTestCase):
                               not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
                               user = 'hdfs',
                               )
+    self.assertNoMoreResources()
 
   def test_stop_secured(self):
     self.executeScript("1.3.2/services/HDFS/package/scripts/snamenode.py",
@@ -127,8 +129,25 @@ class TestSNamenode(RMFTestCase):
                               action = ['delete'],
                               ignore_failures = True,
                               )
+    self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -137,6 +156,22 @@ class TestSNamenode(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',

+ 27 - 1
ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_client.py

@@ -62,6 +62,16 @@ class TestMapreduceClient(RMFTestCase):
       owner = 'mapred',
       group = 'hadoop',
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
     self.assertNoMoreResources()
 
   def test_configure_secured(self):
@@ -99,5 +109,21 @@ class TestMapreduceClient(RMFTestCase):
       owner = 'mapred',
       group = 'hadoop',
     )
-
+    self.assertResourceCalled('File', '/usr/lib/hadoop/bin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
     self.assertNoMoreResources()

+ 27 - 0
ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_historyserver.py

@@ -198,6 +198,16 @@ class TestHistoryServer(RMFTestCase):
       owner = 'mapred',
       group = 'hadoop',
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('HdfsDirectory', '/mapred',
@@ -285,3 +295,20 @@ class TestHistoryServer(RMFTestCase):
       owner = 'mapred',
       group = 'hadoop',
     )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/bin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )

+ 63 - 2
ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_jobtracker.py

@@ -22,6 +22,7 @@ from stacks.utils.RMFTestCase import *
 
 class TestJobtracker(RMFTestCase):
 
+  @patch("os.path.exists", new = MagicMock(return_value=True))
   def test_configure_default(self):
     self.executeScript("1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py",
                        classname = "Jobtracker",
@@ -31,13 +32,13 @@ class TestJobtracker(RMFTestCase):
     self.assert_configure_default()
     self.assertNoMoreResources()
 
+  @patch("os.path.exists", new = MagicMock(return_value=True))
   def test_start_default(self):
     self.executeScript("1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py",
                        classname = "Jobtracker",
                        command = "start",
                        config_file="default.json"
       )
-
     self.assert_configure_default()
     self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start jobtracker',
                        user = 'mapred',
@@ -98,6 +99,7 @@ class TestJobtracker(RMFTestCase):
                               )
     self.assertNoMoreResources()
 
+  @patch("os.path.exists", new = MagicMock(return_value=True))
   def test_configure_secured(self):
 
     self.executeScript("1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py",
@@ -108,13 +110,13 @@ class TestJobtracker(RMFTestCase):
     self.assert_configure_secured()
     self.assertNoMoreResources()
 
+  @patch("os.path.exists", new = MagicMock(return_value=True))
   def test_start_secured(self):
     self.executeScript("1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py",
                        classname = "Jobtracker",
                        command = "start",
                        config_file="secured.json"
     )
-
     self.assert_configure_secured()
     self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start jobtracker',
                        user = 'mapred',
@@ -257,6 +259,32 @@ class TestJobtracker(RMFTestCase):
       owner = 'mapred',
       group = 'hadoop',
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('HdfsDirectory', '/mapred',
@@ -349,3 +377,36 @@ class TestJobtracker(RMFTestCase):
       owner = 'mapred',
       group = 'hadoop',
     )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/bin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
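
The new @patch("os.path.exists", ...) decorators are what make the fair-scheduler.xml and ssl-*.xml.example assertions deterministic: the service scripts only declare those File resources when os.path.exists() succeeds, so the mock forces every check down the "file present" branch regardless of the machine running the tests. In miniature:

    # Without the patch, the File resources for fair-scheduler.xml and
    # the ssl examples would only be declared if those files happened to
    # exist on the test host.
    import os
    from mock import MagicMock, patch

    @patch("os.path.exists", new=MagicMock(return_value=True))
    def inside_test():
      return os.path.exists("/etc/hadoop/conf/fair-scheduler.xml")

    print inside_test()          # -> True (patched)
    print os.path.exists("/no")  # -> False (patch no longer active)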

+ 28 - 3
ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_tasktracker.py

@@ -37,7 +37,6 @@ class TestTasktracker(RMFTestCase):
                          command = "start",
                          config_file="default.json"
       )
-
     self.assert_configure_default()
     self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker',
                               user = 'mapred',
@@ -80,7 +79,6 @@ class TestTasktracker(RMFTestCase):
                          command = "start",
                          config_file="secured.json"
     )
-
     self.assert_configure_secured()
     self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker',
                               user = 'mapred',
@@ -141,6 +139,16 @@ class TestTasktracker(RMFTestCase):
       owner = 'mapred',
       group = 'hadoop',
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/var/run/hadoop/mapred',
@@ -170,4 +178,21 @@ class TestTasktracker(RMFTestCase):
     self.assertResourceCalled('File', '/etc/hadoop/conf/mapred.include',
       owner = 'mapred',
       group = 'hadoop',
-    )
+    )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/bin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )

+ 48 - 0
ambari-server/src/test/python/stacks/1.3.2/hooks/after-INSTALL/test_after_install.py

@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+
+@patch("os.path.exists", new = MagicMock(return_value=True))
+class TestHookAfterInstall(RMFTestCase):
+  def test_hook_default(self):
+    self.executeScript("1.3.2/hooks/after-INSTALL/scripts/hook.py",
+                       classname="AfterInstallHook",
+                       command="hook",
+                       config_file="default.json"
+    )
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf',
+                              owner = 'root',
+                              group = 'root',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
+                              content = Template('hadoop-env.sh.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              )
+    self.assertNoMoreResources()
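
Here the patch decorates the class rather than each method: mock applies a class-level @patch to every method whose name starts with the test prefix, so any future test added to TestHookAfterInstall also runs with os.path.exists forced to True, without repeating the decorator. Because new=... supplies the replacement explicitly, the test methods receive no extra mock argument. A minimal demonstration of that mock behavior:

    # mock propagates a class-level patch to every test* method; with
    # new=... given, no extra mock argument is injected.
    import os
    from mock import MagicMock, patch

    @patch("os.path.exists", new=MagicMock(return_value=True))
    class Demo(object):
      def test_anything(self):
        return os.path.exists("/definitely/not/there")

    print Demo().test_anything()  # -> True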

+ 60 - 29
ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py

@@ -28,50 +28,81 @@ class TestHookBeforeInstall(RMFTestCase):
                        command="hook",
                        config_file="default.json"
     )
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
+                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
+                              path = ['/bin', '/usr/bin/'],
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
+                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
+                              path = ['/bin', '/usr/bin/'],
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              ignore_failures = True,
+                              path = ['/bin', '/usr/bin/'],
+                              )
     self.assertResourceCalled('Group', 'hadoop',)
     self.assertResourceCalled('Group', 'users',)
     self.assertResourceCalled('Group', 'users',)
     self.assertResourceCalled('User', 'ambari-qa',
-                              gid='hadoop',
-                              groups=[u'users'],)
+                              gid = 'hadoop',
+                              groups = [u'users'],
+                              )
     self.assertResourceCalled('File', '/tmp/changeUid.sh',
-                              content=StaticFile('changeToSecureUid.sh'),
-                              mode=0555,)
-    self.assertResourceCalled('Execute',
-                              '/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null',
-                              not_if='test $(id -u ambari-qa) -gt 1000',)
+                              content = StaticFile('changeToSecureUid.sh'),
+                              mode = 0555,
+                              )
+    self.assertResourceCalled('Execute', '/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null',
+                              not_if = 'test $(id -u ambari-qa) -gt 1000',
+                              )
     self.assertResourceCalled('User', 'hbase',
-                              gid='hadoop',
-                              groups=[u'hadoop'],)
+                              gid = 'hadoop',
+                              groups = [u'hadoop'],
+                              )
     self.assertResourceCalled('File', '/tmp/changeUid.sh',
-                              content=StaticFile('changeToSecureUid.sh'),
-                              mode=0555,)
-    self.assertResourceCalled('Execute',
-                              '/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null',
-                              not_if='test $(id -u hbase) -gt 1000',)
+                              content = StaticFile('changeToSecureUid.sh'),
+                              mode = 0555,
+                              )
+    self.assertResourceCalled('Execute', '/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null',
+                              not_if = 'test $(id -u hbase) -gt 1000',
+                              )
     self.assertResourceCalled('Group', 'nagios',)
-    self.assertResourceCalled('User', 'nagios', gid='nagios',)
-    self.assertResourceCalled('User', 'oozie', gid='hadoop',)
-    self.assertResourceCalled('User', 'hcat', gid='hadoop',)
-    self.assertResourceCalled('User', 'hcat', gid='hadoop',)
+    self.assertResourceCalled('User', 'nagios',
+                              gid = 'nagios',
+                              )
+    self.assertResourceCalled('User', 'oozie',
+                              gid = 'hadoop',
+                              )
+    self.assertResourceCalled('User', 'hcat',
+                              gid = 'hadoop',
+                              )
+    self.assertResourceCalled('User', 'hcat',
+                              gid = 'hadoop',
+                              )
     self.assertResourceCalled('User', 'hive',
-                              gid='hadoop',)
+                              gid = 'hadoop',
+                              )
     self.assertResourceCalled('Group', 'nobody',)
     self.assertResourceCalled('Group', 'nobody',)
     self.assertResourceCalled('User', 'nobody',
-                              gid='hadoop',
-                              groups=[u'nobody'],)
+                              gid = 'hadoop',
+                              groups = [u'nobody'],
+                              )
     self.assertResourceCalled('User', 'nobody',
-                              gid='hadoop',
-                              groups=[u'nobody'],)
+                              gid = 'hadoop',
+                              groups = [u'nobody'],
+                              )
     self.assertResourceCalled('User', 'hdfs',
-                              gid='hadoop',
-                              groups=[u'hadoop'],)
+                              gid = 'hadoop',
+                              groups = [u'hadoop'],
+                              )
     self.assertResourceCalled('User', 'mapred',
-                              gid='hadoop',
-                              groups=[u'hadoop'],)
+                              gid = 'hadoop',
+                              groups = [u'hadoop'],
+                              )
     self.assertResourceCalled('User', 'zookeeper',
-                              gid='hadoop',)
+                              gid = 'hadoop',
+                              )
     self.assertResourceCalled('Package', 'unzip',)
     self.assertResourceCalled('Package', 'net-snmp',)
-    self.assertNoMoreResources()
+    self.assertNoMoreResources()
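
The JDK and JCE download steps now asserted here moved from the before-START hook into before-INSTALL. Each Execute is guarded: not_if runs its check command first and skips the action when the check succeeds, and ignore_failures keeps an unreachable JCE URL from failing the whole install. A sketch of the guard pattern with placeholder URL and paths (only the guard semantics are the point):

    # not_if makes the download idempotent: once the JDK is unpacked the
    # guard command succeeds and the Execute is skipped. The URL and
    # paths here are placeholders, not the values from the test fixture.
    from resource_management import Execute

    Execute('mkdir -p /tmp/HDP-artifacts/ ; '
            'curl -kf --retry 10 http://ambari-server:8080/resources/jdk.tar.gz '
            '-o /tmp/HDP-artifacts/jdk.tar.gz',
            not_if='test -e /usr/jdk64/jdk1.7.0_45/bin/java',
            path=['/bin', '/usr/bin/'])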

+ 33 - 190
ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py

@@ -19,29 +19,18 @@ limitations under the License.
 '''
 
 from mock.mock import MagicMock, call, patch
+from resource_management import *
 from stacks.utils.RMFTestCase import *
 
 @patch("os.path.exists", new = MagicMock(return_value=True))
+@patch.object(Hook, "run_custom_hook")
 class TestHookBeforeStart(RMFTestCase):
-  def test_hook_default(self):
+  def test_hook_default(self, mockHook):
     self.executeScript("1.3.2/hooks/before-START/scripts/hook.py",
                        classname="BeforeConfigureHook",
                        command="hook",
                        config_file="default.json"
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              ignore_failures = True,
-                              path = ['/bin', '/usr/bin/'],
-                              )
     self.assertResourceCalled('Execute', '/bin/echo 0 > /selinux/enforce',
                               only_if = 'test -f /selinux/enforce',
                               )
@@ -49,120 +38,55 @@ class TestHookBeforeStart(RMFTestCase):
                               )
     self.assertResourceCalled('Execute', 'mkdir -p /usr/lib/hadoop/lib/native/Linux-amd64-64; ln -sf /usr/lib64/libsnappy.so /usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf',
-                              owner = 'root',
-                              group = 'root',
-                              recursive = True,
-                              )
     self.assertResourceCalled('Directory', '/var/log/hadoop',
                               owner = 'root',
                               group = 'root',
                               recursive = True,
                               )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
-                              owner = 'root',
-                              group = 'root',
-                              recursive = True,
-                              )
-    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
-                              content = Template('hdfs.conf.j2'),
-                              owner = 'root',
-                              group = 'root',
-                              mode = 0644,
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
-                              content = Template('taskcontroller.cfg.j2'),
-                              owner = 'hdfs',
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
-                              content = Template('hadoop-env.sh.j2'),
-                              owner = 'hdfs',
-                              )
+                            owner = 'root',
+                            group = 'root',
+                            recursive = True,
+                            )
     self.assertResourceCalled('File', '/etc/hadoop/conf/commons-logging.properties',
                               content = Template('commons-logging.properties.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
-                              content = Template('slaves.j2'),
-                              owner = 'hdfs',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/health_check',
                               content = Template('health_check.j2'),
                               owner = 'hdfs',
                               )
-    rca_properties = format('''
-
-log4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender
-log4j.appender.JHA.database=jdbc:postgresql://c6401.ambari.apache.org/ambarirca
-log4j.appender.JHA.driver=org.postgresql.Driver
-log4j.appender.JHA.user=mapred
-log4j.appender.JHA.password=mapred
-
-log4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=DEBUG,JHA
-log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true
-
-''')
-    self.maxDiff = None
-    self.assertResourceCalled('File',
-                              '/etc/hadoop/conf/log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hdfs',
-                              content='log4jproperties\nline2log4jproperties\nline2'+rca_properties
-    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/log4j.properties',
+                              content = 'log4jproperties\nline2log4jproperties\nline2\n\nlog4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender\nlog4j.appender.JHA.database=jdbc:postgresql://c6401.ambari.apache.org/ambarirca\nlog4j.appender.JHA.driver=org.postgresql.Driver\nlog4j.appender.JHA.user=mapred\nlog4j.appender.JHA.password=mapred\n\nlog4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=DEBUG,JHA\nlog4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true\n\n',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-metrics2.properties',
-      content = Template('hadoop-metrics2.properties.j2'),
-      owner = 'hdfs',
-    )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
-      owner = 'mapred',
-      group = 'hadoop',
-    )
+                              content = Template('hadoop-metrics2.properties.j2'),
+                              owner = 'hdfs',
+                              )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
-      owner = 'hdfs',
-      group = 'hadoop',
-      conf_dir = '/etc/hadoop/conf',
-      configurations = self.getConfig()['configurations']['core-site'],
-    )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-      owner = 'mapred',
-      group = 'hadoop',
-      conf_dir = '/etc/hadoop/conf',
-      configurations = self.getConfig()['configurations']['mapred-site'],
-    )
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/task-log4j.properties',
-      content = StaticFile('task-log4j.properties'),
-      mode = 0755,
-    )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-      owner = 'hdfs',
-      group = 'hadoop',
-      conf_dir = '/etc/hadoop/conf',
-      configurations = self.getConfig()['configurations']['hdfs-site'],
-    )
+                              content = StaticFile('task-log4j.properties'),
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/hadoop-tools.jar',
-      to = '/usr/lib/hadoop/hadoop-tools.jar',
-    )
+                              to = '/usr/lib/hadoop/hadoop-tools.jar',
+                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/configuration.xsl',
-      owner = 'hdfs',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
-      owner = 'mapred',
-      group = 'hadoop',
-    )
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/masters',
       owner = 'hdfs',
       group = 'hadoop',
     )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
-      owner = 'mapred',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
-      owner = 'mapred',
-      group = 'hadoop',
-    )
     self.assertResourceCalled('File', '/etc/snmp/snmpd.conf',
                               content = Template('snmpd.conf.j2'),
                               )
@@ -171,30 +95,12 @@ log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true
                               )
     self.assertNoMoreResources()
 
-  def test_hook_secured(self):
+  def test_hook_secured(self, mockHook):
     self.executeScript("1.3.2/hooks/before-START/scripts/hook.py",
                        classname="BeforeConfigureHook",
                        command="hook",
                        config_file="secured.json"
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              ignore_failures = True,
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              path = ['/bin/', '/usr/bin'],
-                              only_if = 'test -e /usr/jdk64/jdk1.7.0_45/jre/lib/security && test -f /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              cwd = '/usr/jdk64/jdk1.7.0_45/jre/lib/security',
-                              )
     self.assertResourceCalled('Execute', '/bin/echo 0 > /selinux/enforce',
                               only_if = 'test -f /selinux/enforce',
                               )
@@ -202,11 +108,6 @@ log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true
                               )
     self.assertResourceCalled('Execute', 'mkdir -p /usr/lib/hadoop/lib/native/Linux-amd64-64; ln -sf /usr/lib64/libsnappy.so /usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf',
-                              owner = 'root',
-                              group = 'root',
-                              recursive = True,
-                              )
     self.assertResourceCalled('Directory', '/var/log/hadoop',
                               owner = 'root',
                               group = 'root',
@@ -217,75 +118,30 @@ log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true
                               group = 'root',
                               recursive = True,
                               )
-    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
-                              content = Template('hdfs.conf.j2'),
-                              owner = 'root',
-                              group = 'root',
-                              mode = 0644,
-                              )
-    self.assertResourceCalled('File', '/usr/lib/hadoop/bin/task-controller',
-                              owner = 'root',
-                              group = 'hadoop',
-                              mode = 06050,
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
-                              content = Template('taskcontroller.cfg.j2'),
-                              owner = 'root',
-                              group = 'hadoop',
-                              mode = 0644,
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
-                              content = Template('hadoop-env.sh.j2'),
-                              owner = 'root',
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/commons-logging.properties',
-                              content = Template('commons-logging.properties.j2'),
-                              owner = 'root',
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
-                              content = Template('slaves.j2'),
-                              owner = 'root',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/health_check',
                               content = Template('health_check.j2'),
                               owner = 'root',
                               )
     self.assertResourceCalled('File', '/etc/hadoop/conf/log4j.properties',
+                              content = 'log4jproperties\nline2',
                               owner = 'hdfs',
                               group = 'hadoop',
                               mode = 0644,
-                              content = 'log4jproperties\nline2'
                               )
     self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-metrics2.properties',
                               content = Template('hadoop-metrics2.properties.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
                               conf_dir = '/etc/hadoop/conf',
                               configurations = self.getConfig()['configurations']['core-site'],
                               )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['mapred-site'],
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/task-log4j.properties',
                               content = StaticFile('task-log4j.properties'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-                              owner = 'hdfs',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['hdfs-site'],
-                              )
     self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/hadoop-tools.jar',
                               to = '/usr/lib/hadoop/hadoop-tools.jar',
                               )
@@ -293,27 +149,14 @@ log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true
                               owner = 'hdfs',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/masters',
                               owner = 'hdfs',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
     self.assertResourceCalled('File', '/etc/snmp/snmpd.conf',
                               content = Template('snmpd.conf.j2'),
                               )
     self.assertResourceCalled('Execute', 'service snmpd start; chkconfig snmpd on',
                               path = ['/usr/local/bin/:/bin/:/sbin/'],
-                             )
-    self.assertNoMoreResources()
-
+                              )
+    self.assertNoMoreResources()
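
Both before-START tests now stub Hook.run_custom_hook with @patch.object, which is also why each test method gained a mockHook parameter (patch.object without new= injects the mock as an argument). The stub keeps whatever the delegated hook would declare out of the resource log, so only before-START's own resources are asserted. The interception pattern in isolation, with a stand-in class rather than the real Ambari Hook:

    # patch.object swaps the named attribute on the class for the
    # duration of the call and hands the mock to the decorated function.
    from mock import patch

    class Hook(object):
      def run_custom_hook(self, name):
        raise RuntimeError("would actually run the %s hook" % name)

    @patch.object(Hook, "run_custom_hook")
    def demo(mock_run):
      Hook().run_custom_hook("after-INSTALL")  # intercepted by the mock
      mock_run.assert_called_once_with("after-INSTALL")

    demo()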

+ 34 - 2
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py

@@ -152,6 +152,22 @@ class TestDatanode(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -160,13 +176,29 @@ class TestDatanode(RMFTestCase):
                               )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/data',
                               owner = 'hdfs',
+                              ignore_failures = True,
                               group = 'hadoop',
                               mode = 0755,
                               recursive = True,
-                              ignore_failures=True,
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -175,8 +207,8 @@ class TestDatanode(RMFTestCase):
                               )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/data',
                               owner = 'hdfs',
+                              ignore_failures = True,
                               group = 'hadoop',
                               mode = 0755,
                               recursive = True,
-                              ignore_failures=True,
                               )

+ 32 - 1
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py

@@ -157,7 +157,22 @@ class TestJournalnode(RMFTestCase):
                               group = 'hadoop',
                               recursive = True,
                               )
-
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/grid/0/hdfs/journal',
@@ -165,3 +180,19 @@ class TestJournalnode(RMFTestCase):
                               group = 'hadoop',
                               recursive = True,
                               )
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )

+ 34 - 2
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py

@@ -307,7 +307,7 @@ class TestNamenode(RMFTestCase):
                        command = "start",
                        config_file="ha_secured.json"
     )
-    self.assert_configure_default()
+    self.assert_configure_secured()
     self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
                               owner = 'hdfs',
                               content = Template('exclude_hosts_list.j2'),
@@ -370,6 +370,22 @@ class TestNamenode(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -378,9 +394,25 @@ class TestNamenode(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',
                               recursive = True,
                               mode = 0755,
-                              )
+                              )

+ 33 - 1
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py

@@ -152,6 +152,22 @@ class TestSNamenode(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -160,9 +176,25 @@ class TestSNamenode(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',
                               mode = 0755,
                               recursive = True,
-                              )
+                              )

+ 34 - 2
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py

@@ -29,6 +29,22 @@ class TestZkfc(RMFTestCase):
                        command = "start",
                        config_file="ha_default.json"
     )
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
     self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
                               owner = 'hdfs',
                               recursive = True,
@@ -39,8 +55,8 @@ class TestZkfc(RMFTestCase):
                               )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
                               action = ['delete'],
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
                               ignore_failures = True,
-                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
                               )
     self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc',
                               not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
@@ -84,6 +100,22 @@ class TestZkfc(RMFTestCase):
                        command = "start",
                        config_file="ha_secured.json"
     )
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
     self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
                               owner = 'hdfs',
                               recursive = True,
@@ -94,8 +126,8 @@ class TestZkfc(RMFTestCase):
                               )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
                               action = ['delete'],
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
                               ignore_failures = True,
-                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
                               )
     self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc',
                               not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
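
Beyond the shared block, both zkfc hunks reorder the kwargs on the stale-pid-file deletion; the ordering only affects the expected-call comparison, not the resource itself. The guard idiom is worth spelling out: one shell condition doubles as a liveness probe, exiting 0 only when the pid file exists and its pid is running, so the File delete and the daemon start are both skipped while zkfc is alive. A condensed sketch of the pattern, with command strings taken from the assertions and the resource_management DSL assumed:

from resource_management import Execute, File

pid_file = "/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid"
# Exits 0 only when the pid file exists AND the recorded pid is running.
daemon_alive = ("ls %(p)s >/dev/null 2>&1 && ps `cat %(p)s` >/dev/null 2>&1"
                % {"p": pid_file})

# Remove a pid file left behind by a crash; skip it if the daemon is alive,
# and tolerate failure since a missing file is the common case.
File(pid_file,
     action=["delete"],
     not_if=daemon_alive,
     ignore_failures=True)

# Start zkfc only when the liveness probe says it is not already running.
Execute("ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec"
        " && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc",
        not_if=daemon_alive)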

+ 0 - 0
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py → ambari-server/src/test/python/stacks/2.0.6/HIVE/_test_hive_service_check.py


+ 39 - 0
ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py

@@ -261,6 +261,22 @@ class TestHistoryServer(RMFTestCase):
       group = 'hadoop',
       mode = 0755,
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('HdfsDirectory', '/app-logs',
@@ -413,3 +429,26 @@ class TestHistoryServer(RMFTestCase):
       group = 'hadoop',
       mode = 0644,
     )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
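
The YARN-side tests gain the mirror-image block: a shared MapReduce initialization that writes taskcontroller.cfg plus the mapred-site and capacity-scheduler XmlConfigs, and in secured mode additionally installs the setuid task-controller binary (mode 06050). The same assertions repeat in the mapreduce2 client, nodemanager, resourcemanager, yarn client, and app timeline server tests below. A sketch reconstructed from the assertions; the security toggle and params names are assumptions:

from resource_management import File, Template, XmlConfig

def mapreduce():
    """Shared MapReduce init, reconstructed from the assertions (a sketch)."""
    import params

    if params.security_enabled:
        # Setuid+setgid launcher required for secure task execution.
        File("/usr/lib/hadoop/sbin/task-controller",
             owner="root",
             group=params.user_group,   # 'hadoop'
             mode=06050)
        File("/etc/hadoop/conf/taskcontroller.cfg",
             content=Template("taskcontroller.cfg.j2"),
             owner="root",
             group=params.user_group,
             mode=0644)
    else:
        File("/etc/hadoop/conf/taskcontroller.cfg",
             content=Template("taskcontroller.cfg.j2"),
             owner=params.hdfs_user)    # 'hdfs' in default.json

    XmlConfig("mapred-site.xml",
              owner=params.mapred_user,          # 'mapred'
              group=params.user_group,
              conf_dir=params.hadoop_conf_dir,   # '/etc/hadoop/conf'
              configurations=params.config['configurations']['mapred-site'])

    XmlConfig("capacity-scheduler.xml",
              owner=params.hdfs_user,
              group=params.user_group,
              conf_dir=params.hadoop_conf_dir,
              configurations=params.config['configurations']['capacity-scheduler'])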

+ 39 - 0
ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py

@@ -120,6 +120,22 @@ class TestMapReduce2Client(RMFTestCase):
       group = 'hadoop',
       mode = 0755,
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
     self.assertNoMoreResources()
 
   def test_configure_secured(self):
@@ -219,5 +235,28 @@ class TestMapReduce2Client(RMFTestCase):
       group = 'hadoop',
       mode = 0644,
     )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
     self.assertNoMoreResources()
 

+ 39 - 1
ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py

@@ -261,6 +261,22 @@ class TestNodeManager(RMFTestCase):
       group = 'hadoop',
       mode = 0755,
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('HdfsDirectory', '/app-logs',
@@ -413,4 +429,26 @@ class TestNodeManager(RMFTestCase):
       group = 'hadoop',
       mode = 0644,
     )
-    
+    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )

+ 40 - 1
ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py

@@ -205,6 +205,22 @@ class TestResourceManager(RMFTestCase):
       group = 'hadoop',
       mode = 0755,
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
 
   def assert_configure_secured(self):
 
@@ -301,4 +317,27 @@ class TestResourceManager(RMFTestCase):
       content = Template('container-executor.cfg.j2'),
       group = 'hadoop',
       mode = 0644,
-    )
+    )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )

+ 55 - 0
ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py

@@ -121,6 +121,22 @@ class TestYarnClient(RMFTestCase):
       group = 'hadoop',
       mode = 0755,
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
     self.assertNoMoreResources()
 
   def test_configure_secured(self):
@@ -220,6 +236,29 @@ class TestYarnClient(RMFTestCase):
       group = 'hadoop',
       mode = 0644,
     )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
     self.assertNoMoreResources()
 
   def test_restart_client(self):
@@ -321,6 +360,22 @@ class TestYarnClient(RMFTestCase):
       group = 'hadoop',
       mode = 0755,
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
     self.assertNoMoreResources()
 
 

+ 13 - 0
ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py

@@ -28,6 +28,19 @@ class TestHookBeforeInstall(RMFTestCase):
                        command="hook",
                        config_file="default.json"
     )
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
+                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
+                              path = ['/bin', '/usr/bin/'],
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
+                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
+                              path = ['/bin', '/usr/bin/'],
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              ignore_failures = True,
+                              path = ['/bin', '/usr/bin/'],
+                              )
     self.assertResourceCalled('Group', 'hadoop', )
     self.assertResourceCalled('Group', 'users', )
     self.assertResourceCalled('Group', 'users', )
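
These three Execute resources are exactly the ones deleted from the before-START test below: the JDK tarball download, its extraction, and the best-effort JCE policy fetch now run once at install time, each guarded so re-runs are cheap no-ops. Expressed as resources rather than assertions, a sketch under the assumption that the hook's shared_initialization wraps the commands roughly like this (all params names are illustrative):

from resource_management import Execute

def setup_java():
    """Sketch of the install-time JDK setup the new assertions cover."""
    import params

    # e.g. 'test -e /usr/jdk64/jdk1.7.0_45/bin/java'
    java_exists = "test -e %s/bin/java" % params.java_home

    # Fetch the JDK tarball from the Ambari server's /resources endpoint.
    Execute("mkdir -p %s ; curl -kf --retry 10 %s -o %s"
            % (params.artifact_dir, params.jdk_url, params.jdk_tarball),
            not_if=java_exists,
            path=["/bin", "/usr/bin/"])

    # Unpack it; skipped on re-run once bin/java exists.
    Execute("mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf %s > /dev/null 2>&1"
            % params.jdk_tarball,
            not_if=java_exists,
            path=["/bin", "/usr/bin/"])

    # The JCE policy download is best effort (ignore_failures); only
    # secured clusters consume it.
    Execute("mkdir -p %s; curl -kf --retry 10 %s -o %s"
            % (params.artifact_dir, params.jce_url, params.jce_zip),
            not_if="test -e %s" % params.jce_zip,
            ignore_failures=True,
            path=["/bin", "/usr/bin/"])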

+ 2 - 136
ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py

@@ -29,19 +29,6 @@ class TestHookBeforeStart(RMFTestCase):
                        command="hook",
                        config_file="default.json"
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              ignore_failures = True,
-                              path = ['/bin', '/usr/bin/'],
-                              )
     self.assertResourceCalled('Execute', '/bin/echo 0 > /selinux/enforce',
                               only_if = 'test -f /selinux/enforce',
                               )
@@ -68,16 +55,6 @@ class TestHookBeforeStart(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
-                              content = Template('hdfs.conf.j2'),
-                              owner = 'root',
-                              group = 'root',
-                              mode = 0644,
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
-                              content = Template('taskcontroller.cfg.j2'),
-                              owner = 'hdfs',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
                               content = Template('hadoop-env.sh.j2'),
                               owner = 'hdfs',
@@ -86,10 +63,6 @@ class TestHookBeforeStart(RMFTestCase):
                               content = Template('commons-logging.properties.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
-                              content = Template('slaves.j2'),
-                              owner = 'hdfs',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/health_check',
                               content = Template('health_check-v2.j2'),
                               owner = 'hdfs',
@@ -105,64 +78,30 @@ class TestHookBeforeStart(RMFTestCase):
                               content = Template('hadoop-metrics2.properties.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
                               conf_dir = '/etc/hadoop/conf',
                               configurations = self.getConfig()['configurations']['core-site'],
                               )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['mapred-site'],
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/task-log4j.properties',
                               content = StaticFile('task-log4j.properties'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
-                              owner = 'hdfs',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
-                              )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-                              owner = 'hdfs',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['hdfs-site'],
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/configuration.xsl',
       owner = 'hdfs',
       group = 'hadoop',
     )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
-      owner = 'mapred',
-      group = 'hadoop',
-    )
     self.assertResourceCalled('File', '/etc/hadoop/conf/masters',
       owner = 'hdfs',
       group = 'hadoop',
     )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
-      owner = 'mapred',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
-      owner = 'mapred',
-      group = 'hadoop',
-    )
     self.assertResourceCalled('File', '/etc/snmp/snmpd.conf',
                               content = Template('snmpd.conf.j2'),
                               )
     self.assertResourceCalled('Execute', 'service snmpd start; chkconfig snmpd on',
-                            path = ['/usr/local/bin/:/bin/:/sbin/'],
-                            )
+                              path = ['/usr/local/bin/:/bin/:/sbin/'],
+                              )
     self.assertNoMoreResources()
 
   def test_hook_secured(self):
@@ -171,24 +110,6 @@ class TestHookBeforeStart(RMFTestCase):
                        command="hook",
                        config_file="secured.json"
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              ignore_failures = True,
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              path = ['/bin/', '/usr/bin'],
-                              only_if = 'test -e /usr/jdk64/jdk1.7.0_45/jre/lib/security && test -f /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              cwd = '/usr/jdk64/jdk1.7.0_45/jre/lib/security',
-                              )
     self.assertResourceCalled('Execute', '/bin/echo 0 > /selinux/enforce',
                               only_if = 'test -f /selinux/enforce',
                               )
@@ -215,23 +136,6 @@ class TestHookBeforeStart(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
-                              content = Template('hdfs.conf.j2'),
-                              owner = 'root',
-                              group = 'root',
-                              mode = 0644,
-                              )
-    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
-                              owner = 'root',
-                              group = 'hadoop',
-                              mode = 06050,
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
-                              content = Template('taskcontroller.cfg.j2'),
-                              owner = 'root',
-                              group = 'hadoop',
-                              mode = 0644,
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
                               content = Template('hadoop-env.sh.j2'),
                               owner = 'root',
@@ -240,10 +144,6 @@ class TestHookBeforeStart(RMFTestCase):
                               content = Template('commons-logging.properties.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
-                              content = Template('slaves.j2'),
-                              owner = 'root',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/health_check',
                               content = Template('health_check-v2.j2'),
                               owner = 'root',
@@ -259,58 +159,24 @@ class TestHookBeforeStart(RMFTestCase):
                               content = Template('hadoop-metrics2.properties.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
                               conf_dir = '/etc/hadoop/conf',
                               configurations = self.getConfig()['configurations']['core-site'],
                               )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['mapred-site'],
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/task-log4j.properties',
                               content = StaticFile('task-log4j.properties'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
-                              owner = 'hdfs',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
-                              )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-                              owner = 'hdfs',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['hdfs-site'],
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/configuration.xsl',
                               owner = 'hdfs',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/masters',
                               owner = 'hdfs',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
     self.assertResourceCalled('File', '/etc/snmp/snmpd.conf',
                               content = Template('snmpd.conf.j2'),
                               )
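
With the JDK setup moved to before-INSTALL and the per-service files moved into the HDFS and MapReduce packages, the before-START hook is reduced to cluster-wide plumbing: selinux, directories, hadoop-env.sh, the logging and health-check templates, core-site.xml, and snmpd. A skeleton of how such a hook plausibly sequences its shared_initialization calls (the function names are illustrative, not confirmed by this diff):

from resource_management.libraries.script.hook import Hook

class BeforeStartHook(Hook):
    def hook(self, env):
        import params
        from shared_initialization import setup_hadoop, setup_configs
        env.set_params(params)
        setup_hadoop()    # dirs, hadoop-env.sh, metrics/logging templates
        setup_configs()   # core-site.xml and the remaining shared files

if __name__ == "__main__":
    BeforeStartHook().execute()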

+ 16 - 0
ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py

@@ -155,3 +155,19 @@ class TestAppTimelineServer(RMFTestCase):
                               group = 'hadoop',
                               mode = 0755,
                               )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )