AMBARI-7564. Storm failed to restart after adding HDFS service to a cluster (aonishuk)

Andrew Onishuk 10 years ago
parent commit 11405289e5
27 changed files with 589 additions and 368 deletions
  1. +12 -0    ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
  2. +0 -1     ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py
  3. +4 -1     ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py
  4. +8 -29    ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py
  5. +0 -0     ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/files/changeToSecureUid.sh (renamed from before-INSTALL/files)
  6. +3 -0     ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/hook.py
  7. +102 -0   ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py
  8. +58 -0    ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/shared_initialization.py
  9. +0 -1     ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py
  10. +0 -35   ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
  11. +0 -1    ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py
  12. +0 -1    ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
  13. +0 -1    ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
  14. +0 -21   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
  15. +0 -0    ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/files/changeToSecureUid.sh (renamed from before-INSTALL/files)
  16. +2 -0    ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py
  17. +106 -2  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
  18. +56 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
  19. +0 -1    ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py
  20. +0 -35   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
  21. +0 -1    ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
  22. +0 -13   ambari-server/src/test/python/stacks/1.3.2/hooks/after-INSTALL/test_after_install.py
  23. +119 -0  ambari-server/src/test/python/stacks/1.3.2/hooks/before-ANY/test_before_any.py
  24. +0 -106  ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
  25. +0 -13   ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
  26. +119 -0  ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
  27. +0 -106  ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py

+ 12 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java

@@ -21,11 +21,13 @@ package org.apache.ambari.server.controller;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CUSTOM_COMMAND;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.GROUP_LIST;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.USER_LIST;
 
 import java.util.ArrayList;
 import java.util.Collections;
@@ -65,6 +67,7 @@ import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.PropertyInfo.PropertyType;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpInProgressEvent;
 import org.apache.ambari.server.utils.StageUtils;
@@ -296,6 +299,15 @@ public class AmbariCustomCommandExecutionHelper {
       // Set parameters required for re-installing clients on restart
       hostLevelParams.put(REPO_INFO, getRepoInfo
         (cluster, host));
+      
+      Set<String> userSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.USER, cluster);
+      String userList = gson.toJson(userSet);
+      hostLevelParams.put(USER_LIST, userList);
+      
+      Set<String> groupSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.GROUP, cluster);
+      String groupList = gson.toJson(groupSet);
+      hostLevelParams.put(GROUP_LIST, groupList);
+      
       execCmd.setHostLevelParams(hostLevelParams);
 
       Map<String, String> commandParams = new TreeMap<String, String>();
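
This is the server-side half of the fix: restart and other custom commands now carry the cluster's USER- and GROUP-typed property values (e.g. hdfs_user, user_group) in the host-level params, so agent hooks can create any users a newly added service introduced before the command runs. The agent scripts below decode these with json.loads; a minimal round-trip sketch, with an illustrative payload (the real lists are built server-side from PropertyType.USER / PropertyType.GROUP values via gson.toJson):

    import json

    # Illustrative payload only; Ambari fills these from ConfigHelper.
    host_level_params = {
        "user_list": '["hdfs", "storm", "ambari-qa"]',
        "group_list": '["hadoop", "users"]',
    }

    user_list = json.loads(host_level_params["user_list"])
    group_list = json.loads(host_level_params["group_list"])
    print(user_list, group_list)  # ['hdfs', 'storm', 'ambari-qa'] ['hadoop', 'users']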

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py

@@ -28,7 +28,6 @@ class AfterInstallHook(Hook):
     import params
 
     env.set_params(params)
-    setup_hadoop_env()
     setup_config()
 
 if __name__ == "__main__":

+ 4 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py

@@ -59,4 +59,7 @@ hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 
 #users and groups
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-user_group = config['configurations']['cluster-env']['user_group']
+user_group = config['configurations']['cluster-env']['user_group']
+
+namenode_host = default("/clusterHostInfo/namenode_host", [])
+has_namenode = not len(namenode_host) == 0

+ 8 - 29
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py

@@ -19,34 +19,13 @@ limitations under the License.
 import os
 from resource_management import *
 
-def setup_hadoop_env():
-  import params
-  if params.security_enabled:
-    tc_owner = "root"
-  else:
-    tc_owner = params.hdfs_user
-    
-  Directory(params.hadoop_conf_empty_dir,
-            recursive=True,
-            owner='root',
-            group='root'
-  )
-  Link(params.hadoop_conf_dir,
-       to=params.hadoop_conf_empty_dir,
-       not_if=format("ls {hadoop_conf_dir}")
-  )
-  
-  File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
-       owner=tc_owner,
-       content=InlineTemplate(params.hadoop_env_sh_template)
-  )
-
 def setup_config():
   import params
-  XmlConfig("core-site.xml",
-            conf_dir=params.hadoop_conf_dir,
-            configurations=params.config['configurations']['core-site'],
-            configuration_attributes=params.config['configuration_attributes']['core-site'],
-            owner=params.hdfs_user,
-            group=params.user_group
-  )
+  if params.has_namenode:
+    XmlConfig("core-site.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations']['core-site'],
+              configuration_attributes=params.config['configuration_attributes']['core-site'],
+              owner=params.hdfs_user,
+              group=params.user_group
+    )
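
This guard is what lets the after-INSTALL hook succeed on HDFS-less clusters: core-site.xml is only rendered when a NameNode actually appears in clusterHostInfo. A standalone sketch of the guard, with an illustrative host map:

    # Illustrative clusterHostInfo; a Storm-only cluster has no namenode_host.
    cluster_host_info = {"namenode_host": []}

    namenode_host = cluster_host_info.get("namenode_host", [])
    has_namenode = not len(namenode_host) == 0
    print(has_namenode)  # False -> the XmlConfig("core-site.xml") call is skipped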

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/files/changeToSecureUid.sh → ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/files/changeToSecureUid.sh


+ 3 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/hook.py

@@ -27,6 +27,9 @@ class BeforeAnyHook(Hook):
     env.set_params(params)
     
     setup_jce()
+    setup_users()
+    setup_hadoop_env()
+
 
 if __name__ == "__main__":
   BeforeAnyHook().execute()
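
Moving setup_users() and setup_hadoop_env() here is the core of the fix: before-ANY runs ahead of every agent command, including restarts, while the before-INSTALL and after-INSTALL hooks they came from only run around package installation. So when HDFS is added to a cluster and Storm is then restarted, the hdfs user and hadoop-env.sh exist on the Storm host before the restart executes. A hypothetical sketch of the ordering difference:

    # Hypothetical mapping of which hooks wrap which commands after this change.
    hooks_by_command = {
        "INSTALL": ["before-ANY", "before-INSTALL", "after-INSTALL"],
        "RESTART": ["before-ANY"],  # user/group and env setup now happen here too
    }
    assert "before-ANY" in hooks_by_command["RESTART"]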

+ 102 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py

@@ -18,6 +18,8 @@ limitations under the License.
 """
 
 from resource_management import *
+import collections
+import json
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
@@ -31,3 +33,103 @@ jdk_name = default("/hostLevelParams/jdk_name", None)
 java_home = config['hostLevelParams']['java_home']
 
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
+
+
+#RPM versioning support
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
+
+#hadoop params
+if rpm_version:
+  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
+else:
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+versioned_hdp_root = '/usr/hdp/current'
+#security params
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+#java params
+java_home = config['hostLevelParams']['java_home']
+#hadoop params
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
+hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
+
+#hadoop-env.sh
+java_home = config['hostLevelParams']['java_home']
+
+if str(config['hostLevelParams']['stack_version']).startswith('2.0') and System.get_instance().os_family != "suse":
+  # deprecated rhel jsvc_path
+  jsvc_path = "/usr/libexec/bigtop-utils"
+else:
+  jsvc_path = "/usr/lib/bigtop-utils"
+
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+namenode_opt_newsize =  config['configurations']['hadoop-env']['namenode_opt_newsize']
+namenode_opt_maxnewsize =  config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = "200m"
+jtnode_opt_maxnewsize = "200m"
+jtnode_heapsize =  "1024m"
+ttnode_heapsize = "1024m"
+
+dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
+
+#users and groups
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+user_group = config['configurations']['cluster-env']['user_group']
+
+hagios_server_hosts = default("/clusterHostInfo/nagios_server_host", [])
+ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
+namenode_host = default("/clusterHostInfo/namenode_host", [])
+hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
+
+has_namenode = not len(namenode_host) == 0
+has_nagios = not len(hagios_server_hosts) == 0
+has_ganglia_server = not len(ganglia_server_hosts) == 0
+has_tez = 'tez-site' in config['configurations']
+has_hbase_masters = not len(hbase_master_hosts) == 0
+
+hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
+
+#users and groups
+hbase_user = config['configurations']['hbase-env']['hbase_user']
+nagios_user = config['configurations']['nagios-env']['nagios_user']
+smoke_user =  config['configurations']['cluster-env']['smokeuser']
+gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
+gmond_user = config['configurations']['ganglia-env']["gmond_user"]
+
+user_group = config['configurations']['cluster-env']['user_group']
+proxyuser_group =  default("/configurations/hadoop-env/proxyuser_group","users")
+nagios_group = config['configurations']['nagios-env']['nagios_group']
+
+ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
+
+smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
+if has_hbase_masters:
+  hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
+#repo params
+repo_info = config['hostLevelParams']['repo_info']
+service_repo_info = default("/hostLevelParams/service_repo_info",None)
+
+user_to_groups_dict = collections.defaultdict(lambda:[user_group])
+user_to_groups_dict[smoke_user] = [proxyuser_group]
+if has_ganglia_server:
+  user_to_groups_dict[gmond_user] = [gmond_user]
+  user_to_groups_dict[gmetad_user] = [gmetad_user]
+if has_tez:
+  user_to_groups_dict[tez_user] = [proxyuser_group]
+
+user_to_gid_dict = collections.defaultdict(lambda:user_group)
+if has_nagios:
+  user_to_gid_dict[nagios_user] = nagios_group
+
+user_list = json.loads(config['hostLevelParams']['user_list'])
+group_list = json.loads(config['hostLevelParams']['group_list'])
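
The two defaultdicts above give every user sensible group membership without enumerating each one: unknown users fall back to the cluster user_group, and only the exceptions (the smoke user, Ganglia daemons, Nagios) are spelled out. A minimal sketch of the pattern with illustrative values:

    import collections

    user_group = "hadoop"
    proxyuser_group = "users"

    user_to_groups_dict = collections.defaultdict(lambda: [user_group])
    user_to_groups_dict["ambari-qa"] = [proxyuser_group]  # smoke-user exception

    print(user_to_groups_dict["hdfs"])       # ['hadoop'] -- default
    print(user_to_groups_dict["ambari-qa"])  # ['users']  -- explicit override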

+ 58 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/shared_initialization.py

@@ -56,3 +56,61 @@ def setup_jce():
             cwd  = security_dir,
             path = ['/bin/','/usr/bin']
     )
+    
+
+def setup_users():
+  """
+  Creates users before cluster installation
+  """
+  import params
+  
+  for group in params.group_list:
+    Group(group,
+        ignore_failures = params.ignore_groupsusers_create
+    )
+    
+  for user in params.user_list:
+    User(user,
+        gid = params.user_to_gid_dict[user],
+        groups = params.user_to_groups_dict[user],
+        ignore_failures = params.ignore_groupsusers_create       
+    )
+           
+  set_uid(params.smoke_user, params.smoke_user_dirs)
+
+  if params.has_hbase_masters:
+    set_uid(params.hbase_user, params.hbase_user_dirs)
+    
+def set_uid(user, user_dirs):
+  """
+  user_dirs - comma separated directories
+  """
+  import params
+
+  File(format("{tmp_dir}/changeUid.sh"),
+       content=StaticFile("changeToSecureUid.sh"),
+       mode=0555)
+  Execute(format("{tmp_dir}/changeUid.sh {user} {user_dirs} 2>/dev/null"),
+          not_if = format("test $(id -u {user}) -gt 1000"))
+    
+def setup_hadoop_env():
+  import params
+  if params.has_namenode:
+    if params.security_enabled:
+      tc_owner = "root"
+    else:
+      tc_owner = params.hdfs_user
+    Directory(params.hadoop_conf_empty_dir,
+              recursive=True,
+              owner='root',
+              group='root'
+    )
+    Link(params.hadoop_conf_dir,
+         to=params.hadoop_conf_empty_dir,
+         not_if=format("ls {hadoop_conf_dir}")
+    )
+    File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
+         owner=tc_owner,
+         content=InlineTemplate(params.hadoop_env_sh_template)
+    )
+
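
Note that set_uid() only rewrites a user's uid when the current one is in the system range: the Execute's not_if shell test skips the script for any uid already above 1000, which keeps the operation idempotent across repeated hook runs. A sketch of the equivalent check in plain Python (subprocess used purely for illustration):

    import subprocess

    def uid_above_1000(user):
        # Same check as the not_if guard: `test $(id -u <user>) -gt 1000`
        uid = int(subprocess.check_output(["id", "-u", user]).strip())
        return uid > 1000

    # The changeUid.sh Execute runs only when this returns False.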

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py

@@ -32,7 +32,6 @@ class BeforeInstallHook(Hook):
     
     install_repos()
     setup_java()
-    setup_users()
     install_packages()
 
 if __name__ == "__main__":

+ 0 - 35
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -21,41 +21,6 @@ import os
 
 from resource_management import *
 
-def setup_users():
-  """
-  Creates users before cluster installation
-  """
-  import params
-  
-  for group in params.group_list:
-    Group(group,
-        ignore_failures = params.ignore_groupsusers_create
-    )
-    
-  for user in params.user_list: 
-    User(user,
-        gid = params.user_to_gid_dict[user],
-        groups = params.user_to_groups_dict[user],
-        ignore_failures = params.ignore_groupsusers_create        
-    )
-  
-  set_uid(params.smoke_user, params.smoke_user_dirs)
-
-  if params.has_hbase_masters:
-    set_uid(params.hbase_user, params.hbase_user_dirs)
-
-def set_uid(user, user_dirs):
-  """
-  user_dirs - comma separated directories
-  """
-  import params
-
-  File(format("{tmp_dir}/changeUid.sh"),
-       content=StaticFile("changeToSecureUid.sh"),
-       mode=0555)
-  Execute(format("{tmp_dir}/changeUid.sh {user} {user_dirs} 2>/dev/null"),
-          not_if = format("test $(id -u {user}) -gt 1000"))
-
 def setup_java():
   """
   Installs jdk using specific params, that comes from ambari-server

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py

@@ -27,7 +27,6 @@ class BeforeStartHook(Hook):
     import params
 
     self.run_custom_hook('before-ANY')
-    self.run_custom_hook('after-INSTALL')
     env.set_params(params)
     
     setup_hadoop()
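
With setup_hadoop_env() now living in before-ANY, before-START no longer needs to chain the after-INSTALL hook just to regenerate hadoop-env.sh; the remaining run_custom_hook('before-ANY') call covers it. A hypothetical sketch of the resulting START-command flow:

    # Hypothetical sketch; run_custom_hook in the real code dispatches
    # another hook script rather than printing.
    def run_custom_hook(name):
        print("running", name)

    def before_start_hook():
        run_custom_hook("before-ANY")   # users, groups, hadoop-env.sh
        print("running before-START")   # setup_hadoop() etc.

    before_start_hook()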

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py

@@ -29,7 +29,6 @@ class AfterInstallHook(Hook):
 
     env.set_params(params)
     setup_hdp_install_directory()
-    setup_hadoop_env()
     setup_config()
 
 if __name__ == "__main__":

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py

@@ -44,7 +44,6 @@ java_home = config['hostLevelParams']['java_home']
 hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
 hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
-hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
 
 #hadoop-env.sh
 java_home = config['hostLevelParams']['java_home']

+ 0 - 21
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py

@@ -27,27 +27,6 @@ def setup_hdp_install_directory():
             only_if=format('ls -d /usr/hdp/{rpm_version}-*')
     )
 
-def setup_hadoop_env():
-  import params
-  if params.has_namenode:
-    if params.security_enabled:
-      tc_owner = "root"
-    else:
-      tc_owner = params.hdfs_user
-    Directory(params.hadoop_conf_empty_dir,
-              recursive=True,
-              owner='root',
-              group='root'
-    )
-    Link(params.hadoop_conf_dir,
-         to=params.hadoop_conf_empty_dir,
-         not_if=format("ls {hadoop_conf_dir}")
-    )
-    File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
-         owner=tc_owner,
-         content=InlineTemplate(params.hadoop_env_sh_template)
-    )
-
 def setup_config():
   import params
   if params.has_namenode:

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/files/changeToSecureUid.sh → ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/files/changeToSecureUid.sh


+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py

@@ -27,6 +27,8 @@ class BeforeAnyHook(Hook):
     env.set_params(params)
     
     setup_jce()
+    setup_users()
+    setup_hadoop_env()
 
 if __name__ == "__main__":
   BeforeAnyHook().execute()

+ 106 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py

@@ -18,12 +18,12 @@ limitations under the License.
 """
 
 from resource_management import *
+import collections
+import json
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-
 artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
 jce_location = config['hostLevelParams']['jdk_location']
@@ -31,3 +31,107 @@ jdk_name = default("/hostLevelParams/jdk_name", None)
 java_home = config['hostLevelParams']['java_home']
 
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
+
+#RPM versioning support
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
+
+#hadoop params
+if rpm_version:
+  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
+else:
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+versioned_hdp_root = '/usr/hdp/current'
+#security params
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+#java params
+java_home = config['hostLevelParams']['java_home']
+#hadoop params
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
+hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
+
+#hadoop-env.sh
+java_home = config['hostLevelParams']['java_home']
+
+if str(config['hostLevelParams']['stack_version']).startswith('2.0') and System.get_instance().os_family != "suse":
+  # deprecated rhel jsvc_path
+  jsvc_path = "/usr/libexec/bigtop-utils"
+else:
+  jsvc_path = "/usr/lib/bigtop-utils"
+
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+namenode_opt_newsize =  config['configurations']['hadoop-env']['namenode_opt_newsize']
+namenode_opt_maxnewsize =  config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = "200m"
+jtnode_opt_maxnewsize = "200m"
+jtnode_heapsize =  "1024m"
+ttnode_heapsize = "1024m"
+
+dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
+
+#users and groups
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hbase_user = config['configurations']['hbase-env']['hbase_user']
+nagios_user = config['configurations']['nagios-env']['nagios_user']
+smoke_user =  config['configurations']['cluster-env']['smokeuser']
+gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
+gmond_user = config['configurations']['ganglia-env']["gmond_user"]
+tez_user = config['configurations']['tez-env']["tez_user"]
+
+user_group = config['configurations']['cluster-env']['user_group']
+proxyuser_group =  default("/configurations/hadoop-env/proxyuser_group","users")
+nagios_group = config['configurations']['nagios-env']['nagios_group']
+
+hagios_server_hosts = default("/clusterHostInfo/nagios_server_host", [])
+ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
+namenode_host = default("/clusterHostInfo/namenode_host", [])
+hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
+
+has_namenode = not len(namenode_host) == 0
+has_nagios = not len(hagios_server_hosts) == 0
+has_ganglia_server = not len(ganglia_server_hosts) == 0
+has_tez = 'tez-site' in config['configurations']
+has_hbase_masters = not len(hbase_master_hosts) == 0
+
+hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
+
+hbase_user = config['configurations']['hbase-env']['hbase_user']
+smoke_user =  config['configurations']['cluster-env']['smokeuser']
+
+user_group = config['configurations']['cluster-env']['user_group']
+proxyuser_group = default("/configurations/hadoop-env/proxyuser_group","users")
+nagios_group = config['configurations']['nagios-env']['nagios_group']
+
+ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
+
+smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
+if has_hbase_masters:
+  hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
+#repo params
+repo_info = config['hostLevelParams']['repo_info']
+service_repo_info = default("/hostLevelParams/service_repo_info",None)
+
+user_to_groups_dict = collections.defaultdict(lambda:[user_group])
+user_to_groups_dict[smoke_user] = [proxyuser_group]
+if has_ganglia_server:
+  user_to_groups_dict[gmond_user] = [gmond_user]
+  user_to_groups_dict[gmetad_user] = [gmetad_user]
+if has_tez:
+  user_to_groups_dict[tez_user] = [proxyuser_group]
+
+user_to_gid_dict = collections.defaultdict(lambda:user_group)
+if has_nagios:
+  user_to_gid_dict[nagios_user] = nagios_group
+
+user_list = json.loads(config['hostLevelParams']['user_list'])
+group_list = json.loads(config['hostLevelParams']['group_list'])
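
The 2.0.6 params mirror the 1.3.2 ones, with Tez handled explicitly: has_tez is derived from the mere presence of the tez-site config type, and tez_user is then placed in the proxyuser group. A standalone sketch of the probe, with an illustrative config dict:

    # Illustrative config; presence of 'tez-site' signals Tez is deployed.
    config = {"configurations": {"tez-site": {}}}

    has_tez = "tez-site" in config["configurations"]
    print(has_tez)  # True -> user_to_groups_dict[tez_user] = [proxyuser_group]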

+ 56 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py

@@ -56,3 +56,59 @@ def setup_jce():
             cwd  = security_dir,
             path = ['/bin/','/usr/bin']
     )
+
+def setup_users():
+  """
+  Creates users before cluster installation
+  """
+  import params
+  
+  for group in params.group_list:
+    Group(group,
+        ignore_failures = params.ignore_groupsusers_create
+    )
+    
+  for user in params.user_list:
+    User(user,
+        gid = params.user_to_gid_dict[user],
+        groups = params.user_to_groups_dict[user],
+        ignore_failures = params.ignore_groupsusers_create       
+    )
+           
+  set_uid(params.smoke_user, params.smoke_user_dirs)
+
+  if params.has_hbase_masters:
+    set_uid(params.hbase_user, params.hbase_user_dirs)
+    
+def set_uid(user, user_dirs):
+  """
+  user_dirs - comma separated directories
+  """
+  import params
+
+  File(format("{tmp_dir}/changeUid.sh"),
+       content=StaticFile("changeToSecureUid.sh"),
+       mode=0555)
+  Execute(format("{tmp_dir}/changeUid.sh {user} {user_dirs} 2>/dev/null"),
+          not_if = format("test $(id -u {user}) -gt 1000"))
+    
+def setup_hadoop_env():
+  import params
+  if params.has_namenode:
+    if params.security_enabled:
+      tc_owner = "root"
+    else:
+      tc_owner = params.hdfs_user
+    Directory(params.hadoop_conf_empty_dir,
+              recursive=True,
+              owner='root',
+              group='root'
+    )
+    Link(params.hadoop_conf_dir,
+         to=params.hadoop_conf_empty_dir,
+         not_if=format("ls {hadoop_conf_dir}")
+    )
+    File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
+         owner=tc_owner,
+         content=InlineTemplate(params.hadoop_env_sh_template)
+    )

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py

@@ -33,7 +33,6 @@ class BeforeInstallHook(Hook):
     install_repos()
     install_packages()
     setup_java()
-    setup_users()
 
 if __name__ == "__main__":
   BeforeInstallHook().execute()

+ 0 - 35
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -21,41 +21,6 @@ import os
 
 from resource_management import *
 
-def setup_users():
-  """
-  Creates users before cluster installation
-  """
-  import params
-  
-  for group in params.group_list:
-    Group(group,
-        ignore_failures = params.ignore_groupsusers_create
-    )
-    
-  for user in params.user_list:
-    User(user,
-        gid = params.user_to_gid_dict[user],
-        groups = params.user_to_groups_dict[user],
-        ignore_failures = params.ignore_groupsusers_create       
-    )
-           
-  set_uid(params.smoke_user, params.smoke_user_dirs)
-
-  if params.has_hbase_masters:
-    set_uid(params.hbase_user, params.hbase_user_dirs)
-
-def set_uid(user, user_dirs):
-  """
-  user_dirs - comma separated directories
-  """
-  import params
-
-  File(format("{tmp_dir}/changeUid.sh"),
-       content=StaticFile("changeToSecureUid.sh"),
-       mode=0555)
-  Execute(format("{tmp_dir}/changeUid.sh {user} {user_dirs} 2>/dev/null"),
-          not_if = format("test $(id -u {user}) -gt 1000"))
-  
 def setup_java():
   """
   Installs jdk using specific params, that comes from ambari-server

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py

@@ -27,7 +27,6 @@ class BeforeStartHook(Hook):
     import params
 
     self.run_custom_hook('before-ANY')
-    self.run_custom_hook('after-INSTALL')
     env.set_params(params)
 
     setup_hadoop()

+ 0 - 13
ambari-server/src/test/python/stacks/1.3.2/hooks/after-INSTALL/test_after_install.py

@@ -30,19 +30,6 @@ class TestHookAfterInstall(RMFTestCase):
                        command="hook",
                        config_file="default.json"
     )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              owner = 'root',
-                              group = 'root',
-                              recursive = True,
-                              )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to = '/etc/hadoop/conf.empty',
-                              not_if = 'ls /etc/hadoop/conf'
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
-                              content = InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              owner = 'hdfs',
-                              )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',

+ 119 - 0
ambari-server/src/test/python/stacks/1.3.2/hooks/before-ANY/test_before_any.py

@@ -36,4 +36,123 @@ class TestHookBeforeInstall(RMFTestCase):
         ignore_failures = True,
         path = ['/bin', '/usr/bin/'],
     )
+    self.assertResourceCalled('Group', 'hadoop',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('Group', 'nobody',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('Group', 'users',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('Group', 'nagios',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('User', 'hive',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'oozie',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'nobody',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'nobody'],
+    )
+    self.assertResourceCalled('User', 'nagios',
+        gid = 'nagios',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'ambari-qa',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'users'],
+    )
+    self.assertResourceCalled('User', 'flume',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'hdfs',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'storm',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'mapred',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'hbase',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'tez',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'zookeeper',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'falcon',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'sqoop',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'yarn',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'hcat',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('File', '/tmp/changeUid.sh',
+        content = StaticFile('changeToSecureUid.sh'),
+        mode = 0555,
+    )
+    self.assertResourceCalled('Execute', '/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null',
+        not_if = 'test $(id -u ambari-qa) -gt 1000',
+    )
+    self.assertResourceCalled('File', '/tmp/changeUid.sh',
+        content = StaticFile('changeToSecureUid.sh'),
+        mode = 0555,
+    )
+    self.assertResourceCalled('Execute', '/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null',
+        not_if = 'test $(id -u hbase) -gt 1000',
+    )
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
+        owner = 'root',
+        group = 'root',
+        recursive = True,
+    )
+    self.assertResourceCalled('Link', '/etc/hadoop/conf',
+        not_if = 'ls /etc/hadoop/conf',
+        to = '/etc/hadoop/conf.empty',
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
+        content = InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
+        owner = 'hdfs',
+    )
     self.assertNoMoreResources()
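
These assertions are order-sensitive: each assertResourceCalled consumes the next recorded resource, and assertNoMoreResources() then checks the recording is empty, so the test pins both the set and the sequence of user/group creation. A minimal sketch of that idea (assuming RMFTestCase records resources in call order; names here are illustrative, not the real API):

    # Illustrative recording of (resource_type, name) tuples in call order.
    calls = [("Group", "hadoop"), ("User", "hdfs")]

    def assert_resource_called(expected):
        assert calls.pop(0) == expected

    def assert_no_more_resources():
        assert not calls

    assert_resource_called(("Group", "hadoop"))
    assert_resource_called(("User", "hdfs"))
    assert_no_more_resources()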

+ 0 - 106
ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py

@@ -47,111 +47,5 @@ class TestHookBeforeInstall(RMFTestCase):
         not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
         path = ['/bin', '/usr/bin/'],
     )
-    self.assertResourceCalled('Group', 'hadoop',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('Group', 'nobody',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('Group', 'users',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('Group', 'nagios',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('User', 'hive',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'oozie',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'nobody',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'nobody'],
-    )
-    self.assertResourceCalled('User', 'nagios',
-        gid = 'nagios',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'ambari-qa',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'users'],
-    )
-    self.assertResourceCalled('User', 'flume',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'hdfs',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'storm',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'mapred',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'hbase',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'tez',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'zookeeper',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'falcon',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'sqoop',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'yarn',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'hcat',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('File', '/tmp/changeUid.sh',
-        content = StaticFile('changeToSecureUid.sh'),
-        mode = 0555,
-    )
-    self.assertResourceCalled('Execute', '/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null',
-        not_if = 'test $(id -u ambari-qa) -gt 1000',
-    )
-    self.assertResourceCalled('File', '/tmp/changeUid.sh',
-        content = StaticFile('changeToSecureUid.sh'),
-        mode = 0555,
-    )
-    self.assertResourceCalled('Execute', '/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null',
-        not_if = 'test $(id -u hbase) -gt 1000',
-    )
     self.assertResourceCalled('Package', 'unzip',)
     self.assertNoMoreResources()

+ 0 - 13
ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py

@@ -30,19 +30,6 @@ class TestHookAfterInstall(RMFTestCase):
                        command="hook",
                        config_file="default.json"
     )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              owner = 'root',
-                              group = 'root',
-                              recursive = True,
-                              )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to = '/etc/hadoop/conf.empty',
-                              not_if = 'ls /etc/hadoop/conf'
-    )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
-                              content = InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              owner = 'hdfs',
-                              )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',

+ 119 - 0
ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py

@@ -36,4 +36,123 @@ class TestHookBeforeInstall(RMFTestCase):
         ignore_failures = True,
         path = ['/bin', '/usr/bin/'],
     )
+    self.assertResourceCalled('Group', 'hadoop',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('Group', 'nobody',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('Group', 'users',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('Group', 'nagios',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('User', 'hive',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'oozie',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'nobody',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'nobody'],
+    )
+    self.assertResourceCalled('User', 'nagios',
+        gid = 'nagios',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'ambari-qa',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'users'],
+    )
+    self.assertResourceCalled('User', 'flume',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'hdfs',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'storm',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'mapred',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'hbase',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'tez',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'users'],
+    )
+    self.assertResourceCalled('User', 'zookeeper',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'falcon',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'sqoop',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'yarn',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'hcat',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('File', '/tmp/changeUid.sh',
+        content = StaticFile('changeToSecureUid.sh'),
+        mode = 0555,
+    )
+    self.assertResourceCalled('Execute', '/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null',
+        not_if = 'test $(id -u ambari-qa) -gt 1000',
+    )
+    self.assertResourceCalled('File', '/tmp/changeUid.sh',
+        content = StaticFile('changeToSecureUid.sh'),
+        mode = 0555,
+    )
+    self.assertResourceCalled('Execute', '/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null',
+        not_if = 'test $(id -u hbase) -gt 1000',
+    )
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
+        owner = 'root',
+        group = 'root',
+        recursive = True,
+    )
+    self.assertResourceCalled('Link', '/etc/hadoop/conf',
+        not_if = 'ls /etc/hadoop/conf',
+        to = '/etc/hadoop/conf.empty',
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
+        content = InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
+        owner = 'hdfs',
+    )
     self.assertNoMoreResources()

+ 0 - 106
ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py

@@ -49,110 +49,4 @@ class TestHookBeforeInstall(RMFTestCase):
         not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
         path = ['/bin', '/usr/bin/'],
     )
-    self.assertResourceCalled('Group', 'hadoop',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('Group', 'nobody',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('Group', 'users',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('Group', 'nagios',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('User', 'hive',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'oozie',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'nobody',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'nobody'],
-    )
-    self.assertResourceCalled('User', 'nagios',
-        gid = 'nagios',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'ambari-qa',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'users'],
-    )
-    self.assertResourceCalled('User', 'flume',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'hdfs',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'storm',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'mapred',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'hbase',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'tez',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'users'],
-    )
-    self.assertResourceCalled('User', 'zookeeper',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'falcon',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'sqoop',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'yarn',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'hcat',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('File', '/tmp/changeUid.sh',
-        content = StaticFile('changeToSecureUid.sh'),
-        mode = 0555,
-    )
-    self.assertResourceCalled('Execute', '/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null',
-        not_if = 'test $(id -u ambari-qa) -gt 1000',
-    )
-    self.assertResourceCalled('File', '/tmp/changeUid.sh',
-        content = StaticFile('changeToSecureUid.sh'),
-        mode = 0555,
-    )
-    self.assertResourceCalled('Execute', '/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null',
-        not_if = 'test $(id -u hbase) -gt 1000',
-    )
     self.assertNoMoreResources()