
AMBARI-10386. Support skipping install operations on hosts that are already sys-prepped

Sumit Mohanty 10 years ago
parent
commit
048c158400
43 changed files with 642 additions and 45 deletions
  1. +2 -2    ambari-common/src/main/python/resource_management/core/providers/package/apt.py
  2. +2 -2    ambari-common/src/main/python/resource_management/core/providers/package/yumrpm.py
  3. +2 -2    ambari-common/src/main/python/resource_management/core/providers/package/zypper.py
  4. +6 -1    ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
  5. +6 -0    ambari-common/src/main/python/resource_management/libraries/script/script.py
  6. +1 -0    ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
  7. +12 -0   ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
  8. +2 -0    ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
  9. +2 -0    ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
  10. +0 -3   ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
  11. +2 -0   ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
  12. +1 -0   ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
  13. +3 -0   ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py
  14. +1 -0   ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
  15. +1 -0   ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_linux.py
  16. +3 -0   ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
  17. +3 -0   ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
  18. +3 -0   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
  19. +1 -0   ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
  20. +1 -0   ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py
  21. +2 -0   ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
  22. +1 -0   ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
  23. +3 -0   ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
  24. +4 -0   ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
  25. +1 -0   ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
  26. +2 -0   ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
  27. +1 -0   ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
  28. +4 -0   ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
  29. +4 -0   ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params.py
  30. +4 -0   ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
  31. +4 -0   ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
  32. +1 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
  33. +32 -20 ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
  34. +1 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
  35. +3 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/repo_initialization.py
  36. +3 -0   ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
  37. +5 -2   ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
  38. +2 -0   ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProviderTest.java
  39. +13 -0  ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
  40. +36 -0  ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
  41. +3 -1   ambari-server/src/test/python/stacks/2.0.6/configs/default.json
  42. +435 -0 ambari-server/src/test/python/stacks/2.0.6/configs/default_no_install.json
  43. +24 -12 ambari-server/src/test/python/stacks/utils/RMFTestCase.py

+ 2 - 2
ambari-common/src/main/python/resource_management/core/providers/package/apt.py

@@ -101,7 +101,7 @@ class AptProvider(PackageProvider):
         Logger.info("Removing temporal sources directory: %s" % apt_sources_list_tmp_dir)
         os.rmdir(apt_sources_list_tmp_dir)
     else:
-      Logger.info("Skipping installing existent package %s" % (name))
+      Logger.info("Skipping installation of existing package %s" % (name))
 
   @replace_underscores
   def upgrade_package(self, name, use_repos=[]):
@@ -114,7 +114,7 @@ class AptProvider(PackageProvider):
       Logger.info("Removing package %s ('%s')" % (name, string_cmd_from_args_list(cmd)))
       shell.checked_call(cmd, sudo=True, logoutput=self.get_logoutput())
     else:
-      Logger.info("Skipping removing non-existent package %s" % (name))
+      Logger.info("Skipping removal of non-existing package %s" % (name))
 
   @replace_underscores
   def _check_existence(self, name):

+ 2 - 2
ambari-common/src/main/python/resource_management/core/providers/package/yumrpm.py

@@ -50,7 +50,7 @@ class YumProvider(PackageProvider):
       Logger.info("Installing package %s ('%s')" % (name, string_cmd_from_args_list(cmd)))
       shell.checked_call(cmd, sudo=True, logoutput=self.get_logoutput())
     else:
-      Logger.info("Skipping installing existent package %s" % (name))
+      Logger.info("Skipping installation of existing package %s" % (name))
 
   def upgrade_package(self, name, use_repos=[]):
     return self.install_package(name, use_repos)
@@ -61,7 +61,7 @@ class YumProvider(PackageProvider):
       Logger.info("Removing package %s ('%s')" % (name, string_cmd_from_args_list(cmd)))
       shell.checked_call(cmd, sudo=True, logoutput=self.get_logoutput())
     else:
-      Logger.info("Skipping removing non-existent package %s" % (name))
+      Logger.info("Skipping removal of non-existing package %s" % (name))
 
   def _check_existence(self, name):
     if '.' in name:  # To work with names like 'zookeeper_2_2_1_0_2072.noarch'

+ 2 - 2
ambari-common/src/main/python/resource_management/core/providers/package/zypper.py

@@ -71,7 +71,7 @@ class ZypperProvider(PackageProvider):
       Logger.info("Installing package %s ('%s')" % (name, string_cmd_from_args_list(cmd)))
       shell.checked_call(cmd, sudo=True, logoutput=self.get_logoutput())
     else:
-      Logger.info("Skipping installing existent package %s" % (name))
+      Logger.info("Skipping installation of existing package %s" % (name))
 
   def upgrade_package(self, name, use_repos=[]):
     return self.install_package(name, use_repos)
@@ -82,7 +82,7 @@ class ZypperProvider(PackageProvider):
       Logger.info("Removing package %s ('%s')" % (name, string_cmd_from_args_list(cmd)))
       shell.checked_call(cmd, sudo=True, logoutput=self.get_logoutput())
     else:
-      Logger.info("Skipping removing non-existent package %s" % (name))
+      Logger.info("Skipping removal of non-existing package %s" % (name))
 
   def _check_existence(self, name):
     code, out = shell.call(CHECK_CMD % name)

+ 6 - 1
ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py

@@ -137,13 +137,14 @@ def _copy_files(source_and_dest_pairs, component_user, file_owner, group_owner,
   return return_value
 
 
-def copy_tarballs_to_hdfs(tarball_prefix, hdp_select_component_name, component_user, file_owner, group_owner):
+def copy_tarballs_to_hdfs(tarball_prefix, hdp_select_component_name, component_user, file_owner, group_owner, ignore_sysprep=False):
   """
   :param tarball_prefix: Prefix of the tarball must be one of tez, hive, mr, pig
   :param hdp_select_component_name: Component name to get the status to determine the version
   :param component_user: User that will execute the Hadoop commands, usually smokeuser
   :param file_owner: Owner of the files copied to HDFS (typically hdfs user)
   :param group_owner: Group owner of the files copied to HDFS (typically hadoop group)
+  :param ignore_sysprep: Ignore sysprep directives
   :return: Returns 0 on success, 1 if no files were copied, and in some cases may raise an exception.
 
   In order to call this function, params.py must have all of the following,
@@ -152,6 +153,10 @@ def copy_tarballs_to_hdfs(tarball_prefix, hdp_select_component_name, component_u
   """
   import params
 
+  if not ignore_sysprep and hasattr(params, "host_sys_prepped") and params.host_sys_prepped:
+    Logger.info("Host is sys-prepped. Tarball %s will not be copied for %s." % (tarball_prefix, hdp_select_component_name))
+    return 0
+
   if not hasattr(params, "hdp_stack_version") or params.hdp_stack_version is None:
     Logger.warning("Could not find hdp_stack_version")
     return 1
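
A hedged usage sketch for the new flag (only the signature comes from this hunk; the call-site values are placeholders): a service script that must ship its tarball even on a sys-prepped host can opt out of the guard explicitly.

  # Illustrative caller, not from this commit.
  copy_tarballs_to_hdfs("tez",                # tarball_prefix: one of tez, hive, mr, pig
                        "hadoop-client",      # hdp_select_component_name (placeholder)
                        params.smokeuser,     # component_user
                        params.hdfs_user,     # file_owner
                        params.user_group,    # group_owner
                        ignore_sysprep=True)  # copy even when host_sys_prepped is set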

+ 6 - 0
ambari-common/src/main/python/resource_management/libraries/script/script.py

@@ -274,6 +274,12 @@ class Script(object):
     from this list
     """
     config = self.get_config()
+    if 'host_sys_prepped' in config['hostLevelParams']:
+      # do not install anything on sys-prepped host
+      if config['hostLevelParams']['host_sys_prepped'] == True:
+        Logger.info("Node has all packages pre-installed. Skipping.")
+        return
+      pass
     try:
       package_list_str = config['hostLevelParams']['package_list']
       if isinstance(package_list_str, basestring) and len(package_list_str) > 0:
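
A minimal command fragment (values illustrative) showing the field this check reads. Note the strict == True comparison: only a JSON boolean true triggers the skip, while a string value such as "true" would fall through to the normal package loop.

  # Hypothetical slice of the agent command JSON seen by install_packages().
  config = {
      "hostLevelParams": {
          "host_sys_prepped": True,  # boolean true -> package installation is skipped
          "package_list": '[{"name": "hadoop"}, {"name": "zookeeper"}]'
      }
  }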

+ 1 - 0
ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java

@@ -306,6 +306,7 @@ public class ExecutionCommand extends AgentCommand {
     String GROUP_LIST = "group_list";
     String VERSION = "version";
     String REFRESH_TOPOLOGY = "refresh_topology";
+    String HOST_SYS_PREPPED = "host_sys_prepped";
     String COMMAND_RETRY_MAX_ATTEMPT_COUNT = "command_retry_max_attempt_count";
     String COMMAND_RETRY_ENABLED = "command_retry_enabled";
 

+ 12 - 0
ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java

@@ -293,6 +293,14 @@ public class Configuration {
   private static final int CLIENT_API_SSL_PORT_DEFAULT = 8443;
   private static final String LDAP_BIND_ANONYMOUSLY_DEFAULT = "true";
 
+  /**
+   * Indicator for sys prepped host
+   * It is possible that some nodes are sys-prepped and some are not. This can later be
+   * enabled per host by the agent overwriting the global indicator from ambari-server.
+   */
+  public static final String SYS_PREPPED_HOSTS_KEY = "packages.pre.installed";
+  public static final String SYS_PREPPED_HOSTS_DEFAULT = "false";
+
   /**
    * !!! TODO: For embedded server only - should be removed later
    */
@@ -738,6 +746,10 @@ public class Configuration {
     return new File(fileName);
   }
 
+  public String areHostsSysPrepped(){
+    return properties.getProperty(SYS_PREPPED_HOSTS_KEY, SYS_PREPPED_HOSTS_DEFAULT);
+  }
+
   public String getStackAdvisorScript() {
     return properties.getProperty(STACK_ADVISOR_SCRIPT, STACK_ADVISOR_SCRIPT_DEFAULT);
   }
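
Operators flip the flag in the server's ambari.properties; a minimal example, where the key name and default come from the constants above. Since getProperty() returns the raw string, areHostsSysPrepped() hands "true"/"false" (not a boolean) to its callers.

  # ambari.properties on the Ambari server (operator-set; default is "false")
  packages.pre.installed=true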

+ 2 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java

@@ -35,6 +35,7 @@ import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCAT
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_NAME;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.MYSQL_JDBC_URL;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.ORACLE_JDBC_URL;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOST_SYS_PREPPED;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
@@ -1096,6 +1097,7 @@ public class AmbariCustomCommandExecutionHelper {
     hostLevelParams.put(ORACLE_JDBC_URL, managementController.getOjdbcUrl());
     hostLevelParams.put(DB_DRIVER_FILENAME, configs.getMySQLJarName());
     hostLevelParams.putAll(managementController.getRcaParameters());
+    hostLevelParams.put(HOST_SYS_PREPPED, configs.areHostsSysPrepped());
     ClusterVersionEntity clusterVersionEntity = clusterVersionDAO.findByClusterAndStateCurrent(cluster.getClusterName());
     if (clusterVersionEntity == null) {
       List<ClusterVersionEntity> clusterVersionEntityList = clusterVersionDAO
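
A hedged sketch of the hostLevelParams block the agent receives once this line runs (the host_sys_prepped key comes from ExecutionCommand.KeyNames; the surrounding entries are illustrative):

  # Hypothetical hostLevelParams fragment as serialized to the agent.
  hostLevelParams = {
      "jdk_location": "http://ambari-server:8080/resources/",  # illustrative value
      "host_sys_prepped": "false",  # string returned by areHostsSysPrepped()
      # ... remaining parameters elided
  }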

+ 2 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java

@@ -87,6 +87,7 @@ import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_R
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.USER_LIST;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOST_SYS_PREPPED;
 
 /**
  * Resource provider for client config resources.
@@ -286,6 +287,7 @@ public class ClientConfigResourceProvider extends AbstractControllerResourceProv
       hostLevelParams.put(DB_NAME, managementController.getServerDB());
       hostLevelParams.put(MYSQL_JDBC_URL, managementController.getMysqljdbcUrl());
       hostLevelParams.put(ORACLE_JDBC_URL, managementController.getOjdbcUrl());
+      hostLevelParams.put(HOST_SYS_PREPPED, configs.areHostsSysPrepped());
       hostLevelParams.putAll(managementController.getRcaParameters());
       hostLevelParams.putAll(managementController.getRcaParameters());
 

+ 0 - 3
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java

@@ -645,9 +645,6 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     actionContext.setIgnoreMaintenance(true);
     actionContext.setTimeout(Short.valueOf(s_configuration.getDefaultAgentTaskTimeout(false)));
 
-    Map<String, String> hostLevelParams = new HashMap<String, String>();
-    hostLevelParams.put(JDK_LOCATION, getManagementController().getJdkResourceUrl());
-
     ExecuteCommandJson jsons = s_commandExecutionHelper.get().getCommandJson(
         actionContext, cluster);
 

+ 2 - 0
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py

@@ -117,6 +117,8 @@ if security_enabled:
 else:
   kinit_cmd = ""
 
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
 #for create_hdfs_directory
 hostname = status_params.hostname
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
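
Every params.py touched by this commit reads the flag through the same default() helper. A simplified stand-in for its behavior (assumed semantics, not the library source): walk the command JSON along a '/'-separated path and return the fallback if any segment is missing, so commands from servers that never set host_sys_prepped resolve to False.

  # Simplified sketch of resource_management.libraries.functions.default;
  # the real helper reads the config itself instead of taking it as an argument.
  def default(path, fallback, config):
      node = config
      for key in path.strip('/').split('/'):
          if not isinstance(node, dict) or key not in node:
              return fallback
          node = node[key]
      return node

  assert default("/hostLevelParams/host_sys_prepped", False, {"hostLevelParams": {}}) == False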

+ 1 - 0
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py

@@ -84,6 +84,7 @@ java64_home = config['hostLevelParams']['java_home']
 java_version = int(config['hostLevelParams']['java_version'])
 
 metrics_collector_heapsize = default('/configurations/ams-env/metrics_collector_heapsize', "512m")
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 hbase_log_dir = config['configurations']['ams-hbase-env']['hbase_log_dir']
 master_heapsize = config['configurations']['ams-hbase-env']['hbase_master_heapsize']

+ 3 - 0
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py

@@ -18,8 +18,11 @@ limitations under the License.
 """
 from ambari_commons import OSCheck
 from status_params import *
+from resource_management.libraries.functions.default import default
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
+
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)

+ 1 - 0
ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py

@@ -30,6 +30,7 @@ else:
 config = Script.get_config()
 
 stack_name = default("/hostLevelParams/stack_name", None)
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)

+ 1 - 0
ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_linux.py

@@ -26,6 +26,7 @@ from ambari_commons import OSCheck
 config = Script.get_config()
 
 flume_conf_dir = '/etc/flume/conf'
+
 flume_user = 'flume'
 flume_group = 'flume'
 if 'flume-env' in config['configurations'] and 'flume_user' in config['configurations']['flume-env']:

+ 3 - 0
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py

@@ -18,8 +18,11 @@ limitations under the License.
 
 """
 from ambari_commons import OSCheck
+from resource_management.libraries.functions.default import default
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
+
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)

+ 3 - 0
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py

@@ -17,8 +17,11 @@ limitations under the License.
 
 """
 from ambari_commons import OSCheck
+from resource_management.libraries.functions.default import default
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
+
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)

+ 3 - 0
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py

@@ -18,8 +18,11 @@ limitations under the License.
 
 """
 from ambari_commons import OSCheck
+from resource_management.libraries.functions.default import default
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
+
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)

+ 1 - 0
ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py

@@ -30,6 +30,7 @@ config = Script.get_config()
 stack_name = default("/hostLevelParams/stack_name", None)
 
 version = default("/commandParams/version", None)
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)

+ 1 - 0
ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/params.py

@@ -40,6 +40,7 @@ kadm5_acl_path = kadm5_acl_dir + '/' + kadm5_acl_file
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 configurations = None
 keytab_details = None

+ 2 - 0
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py

@@ -30,6 +30,8 @@ if OSCheck.is_windows_family():
 else:
   from params_linux import *
 
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 

+ 1 - 0
ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py

@@ -27,6 +27,7 @@ config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
 stack_name = default("/hostLevelParams/stack_name", None)
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)

+ 3 - 0
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py

@@ -18,8 +18,11 @@ limitations under the License.
 
 """
 from ambari_commons import OSCheck
+from resource_management.libraries.functions.default import default
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
+
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)

+ 4 - 0
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py

@@ -19,8 +19,12 @@ Ambari Agent
 
 """
 from ambari_commons import OSCheck
+from resource_management.libraries.functions.default import default
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
+
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+

+ 1 - 0
ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py

@@ -27,6 +27,7 @@ tmp_dir = Script.get_tmp_dir()
 
 stack_name = default("/hostLevelParams/stack_name", None)
 version = default("/commandParams/version", None)
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)

+ 2 - 0
ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py

@@ -28,6 +28,8 @@ if OSCheck.is_windows_family():
 else:
   from params_linux import *
 
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
 # server configurations
 config = Script.get_config()
 

+ 1 - 0
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py

@@ -30,6 +30,7 @@ tmp_dir = Script.get_tmp_dir()
 stack_name = default("/hostLevelParams/stack_name", None)
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)

+ 4 - 0
ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py

@@ -18,8 +18,12 @@ limitations under the License.
 """
 
 from ambari_commons import OSCheck
+from resource_management.libraries.functions.default import default
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
+
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+

+ 4 - 0
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params.py

@@ -18,8 +18,12 @@ limitations under the License.
 
 """
 from ambari_commons import OSCheck
+from resource_management.libraries.functions.default import default
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
+
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+

+ 4 - 0
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py

@@ -18,8 +18,12 @@ limitations under the License.
 
 """
 from ambari_commons import OSCheck
+from resource_management.libraries.functions.default import default
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
+
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+

+ 4 - 0
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py

@@ -20,8 +20,12 @@ Ambari Agent
 
 """
 from ambari_commons import OSCheck
+from resource_management.libraries.functions.default import default
 
 if OSCheck.is_windows_family():
   from params_windows import *
 else:
   from params_linux import *
+
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py

@@ -180,5 +180,6 @@ user_to_gid_dict = collections.defaultdict(lambda:user_group)
 
 user_list = json.loads(config['hostLevelParams']['user_list'])
 group_list = json.loads(config['hostLevelParams']['group_list'])
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 tez_am_view_acls = config['configurations']['tez-site']["tez.am.view-acls"]

+ 32 - 20
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py

@@ -62,20 +62,25 @@ def setup_users():
   Creates users before cluster installation
   """
   import params
-  
-  for group in params.group_list:
-    Group(group,
-        ignore_failures = params.ignore_groupsusers_create
-    )
-    
-  for user in params.user_list:
-    User(user,
-        gid = params.user_to_gid_dict[user],
-        groups = params.user_to_groups_dict[user],
-        ignore_failures = params.ignore_groupsusers_create       
-    )
-           
-  set_uid(params.smoke_user, params.smoke_user_dirs)
+
+  if not params.host_sys_prepped:
+    for group in params.group_list:
+      Group(group,
+          ignore_failures = params.ignore_groupsusers_create
+      )
+
+    for user in params.user_list:
+      User(user,
+          gid = params.user_to_gid_dict[user],
+          groups = params.user_to_groups_dict[user],
+          ignore_failures = params.ignore_groupsusers_create
+      )
+
+    set_uid(params.smoke_user, params.smoke_user_dirs)
+  else:
+    print 'Skipping creation of User and Group as host is sys prepped'
+    pass
+
 
   if params.has_hbase_masters:
     Directory (params.hbase_tmp_dir,
@@ -84,12 +89,19 @@ def setup_users():
                recursive = True,
                cd_access="a",
     )
-    set_uid(params.hbase_user, params.hbase_user_dirs)
-
-  if params.has_namenode:
-    create_dfs_cluster_admins()
-  if params.has_tez and params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.3') >= 0:
-      create_tez_am_view_acls()
+    if not params.host_sys_prepped:
+      set_uid(params.hbase_user, params.hbase_user_dirs)
+    else:
+      print 'Skipping setting uid for hbase user as host is sys prepped'
+      pass
+
+  if not params.host_sys_prepped:
+    if params.has_namenode:
+      create_dfs_cluster_admins()
+    if params.has_tez and params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.3') >= 0:
+        create_tez_am_view_acls()
+  else:
+    print 'Skipping setting dfs cluster admin and tez view acls as host is sys prepped'
 
 def create_dfs_cluster_admins():
   """

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py

@@ -97,6 +97,7 @@ jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is a
 jce_location = config['hostLevelParams']['jdk_location']
 jdk_location = config['hostLevelParams']['jdk_location']
 ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
 smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
 if has_hbase_masters:

+ 3 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/repo_initialization.py

@@ -52,6 +52,9 @@ def _alter_repo(action, repo_string, repo_template):
 
 def install_repos():
   import params
+  if params.host_sys_prepped:
+    return
+
   template = "repo_suse_rhel.j2" if OSCheck.is_suse_family() or OSCheck.is_redhat_family() else "repo_ubuntu.j2"
   _alter_repo("create", params.repo_info, template)
   if params.service_repo_info:

+ 3 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -76,6 +76,9 @@ def setup_java():
 
 def install_packages():
   import params
+  if params.host_sys_prepped:
+    return
+
   packages = ['unzip', 'curl']
   if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
     packages.append('hdp-select')

+ 5 - 2
ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java

@@ -76,6 +76,7 @@ import java.util.Map;
 import java.util.Set;
 
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOST_SYS_PREPPED;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JAVA_VERSION;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION;
@@ -1809,6 +1810,7 @@ public class AmbariManagementControllerImplTest {
     expect(configuration.getOjdbcJarName()).andReturn(OJDBC_JAR_NAME);
     expect(configuration.getServerDBName()).andReturn(SERVER_DB_NAME);
     expect(configuration.getJavaVersion()).andReturn(8);
+    expect(configuration.areHostsSysPrepped()).andReturn("true");
     expect(clusterVersionDAO.findByClusterAndStateCurrent(clusterName)).andReturn(clusterVersionEntity).anyTimes();
     expect(clusterVersionEntity.getRepositoryVersion()).andReturn(repositoryVersionEntity).anyTimes();
     expect(repositoryVersionEntity.getVersion()).andReturn("1234").anyTimes();
@@ -1846,11 +1848,12 @@ public class AmbariManagementControllerImplTest {
 
     Map<String, String> defaultHostParams = helper.createDefaultHostParams(cluster);
 
-    assertEquals(defaultHostParams.size(), 12);
+    assertEquals(defaultHostParams.size(), 13);
     assertEquals(defaultHostParams.get(DB_DRIVER_FILENAME), MYSQL_JAR);
     assertEquals(defaultHostParams.get(STACK_NAME), SOME_STACK_NAME);
     assertEquals(defaultHostParams.get(STACK_VERSION), SOME_STACK_VERSION);
-    assertEquals(defaultHostParams.get(JAVA_VERSION), "8");
+    assertEquals("true", defaultHostParams.get(HOST_SYS_PREPPED));
+    assertEquals("8", defaultHostParams.get(JAVA_VERSION));
   }
 
   @Test

+ 2 - 0
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProviderTest.java

@@ -240,6 +240,7 @@ public class ClientConfigResourceProviderTest {
     expect(configMap.get(Configuration.AMBARI_PYTHON_WRAP_KEY)).andReturn(Configuration.AMBARI_PYTHON_WRAP_DEFAULT);
     expect(configuration.getConfigsMap()).andReturn(returnConfigMap);
     expect(configuration.getJavaVersion()).andReturn(8);
+    expect(configuration.areHostsSysPrepped()).andReturn("false");
     expect(configuration.getExternalScriptTimeout()).andReturn(Integer.parseInt(Configuration.EXTERNAL_SCRIPT_TIMEOUT_DEFAULT));
     Map<String,String> props = new HashMap<String, String>();
     props.put(Configuration.HIVE_METASTORE_PASSWORD_PROPERTY, "pass");
@@ -442,6 +443,7 @@ public class ClientConfigResourceProviderTest {
     expect(configuration.getConfigsMap()).andReturn(returnConfigMap);
     expect(configuration.getCommonServicesPath()).andReturn(commonServicesPath);
     expect(configuration.getJavaVersion()).andReturn(8);
+    expect(configuration.areHostsSysPrepped()).andReturn("false");
     expect(configuration.getExternalScriptTimeout()).andReturn(Integer.parseInt(Configuration.EXTERNAL_SCRIPT_TIMEOUT_DEFAULT));
     Map<String,String> props = new HashMap<String, String>();
     props.put(Configuration.HIVE_METASTORE_PASSWORD_PROPERTY, "pass");

+ 13 - 0
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py

@@ -135,6 +135,19 @@ class TestNamenode(RMFTestCase):
     self.assertNoMoreResources()
     pass
 
+  def test_install_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "install",
+                       config_file = "default_no_install.json",
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES,
+                       try_install=True
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+    pass
+
   def test_start_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                        classname = "NameNode",

+ 36 - 0
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py

@@ -77,6 +77,42 @@ class TestHiveServer(RMFTestCase):
     self.assertTrue(socket_mock.called)
     self.assertTrue(s.close.called)
 
+  @patch.object(dynamic_variable_interpretation, "_get_tar_source_and_dest_folder")
+  @patch("socket.socket")
+  def test_start_default_no_copy(self, socket_mock, get_tar_mock):
+    s = socket_mock.return_value
+
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
+                       classname = "HiveServer",
+                       command = "start",
+                       config_file="default_no_install.json",
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+
+    get_tar_mock.return_value = ("a", "b")
+    self.assert_configure_default()
+
+    self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
+                              environment = {'PATH': '/bin:/usr/lib/hive/bin:/usr/bin'},
+                              user = 'hive',
+                              )
+    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
+                              not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
+                              environment = {'HADOOP_HOME' : '/usr', 'JAVA_HOME':'/usr/jdk64/jdk1.7.0_45'},
+                              path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
+                              user = 'hive'
+    )
+
+    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/lib/hive/lib//mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
+                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
+    )
+
+    self.assertNoMoreResources()
+    self.assertTrue(socket_mock.called)
+    self.assertTrue(s.close.called)
+    self.assertFalse(get_tar_mock.called)
+
   @patch("socket.socket")
   @patch.object(dynamic_variable_interpretation, "copy_tarballs_to_hdfs", new=MagicMock())
   def test_stop_default(self, socket_mock):

+ 3 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/default.json

@@ -476,7 +476,9 @@
         "ignore_groupsusers_create": "false",
         "smokeuser": "ambari-qa",
         "kerberos_domain": "EXAMPLE.COM",
-        "user_group": "hadoop"
+        "user_group": "hadoop",
+        "mapreduce_tar_destination_folder" : "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
+        "mapreduce_tar_source" : "/usr/hdp/current/hadoop-client/mapreduce.tar.gz"
       },
 
       "hbase-env": {

File diff suppressed because it is too large
+ 435 - 0
ambari-server/src/test/python/stacks/2.0.6/configs/default_no_install.json


+ 24 - 12
ambari-server/src/test/python/stacks/utils/RMFTestCase.py

@@ -68,7 +68,8 @@ class RMFTestCase(TestCase):
                     kinit_path_local="/usr/bin/kinit",
                     os_env={'PATH':'/bin'},
                     target=TARGET_STACKS,
-                    mocks_dict={}
+                    mocks_dict={},
+                    try_install=False
                     ):
     norm_path = os.path.normpath(path)
     src_dir = RMFTestCase.get_src_folder()
@@ -133,17 +134,28 @@ class RMFTestCase(TestCase):
       del(sys.modules["status_params"])
 
     # run
-    with Environment(basedir, test_mode=True) as RMFTestCase.env:
-      with patch('resource_management.core.shell.checked_call', side_effect=checked_call_mocks) as mocks_dict['checked_call']:
-        with patch('resource_management.core.shell.call', side_effect=call_mocks) as mocks_dict['call']:
-          with patch.object(Script, 'get_config', return_value=self.config_dict) as mocks_dict['get_config']: # mocking configurations
-            with patch.object(Script, 'get_tmp_dir', return_value="/tmp") as mocks_dict['get_tmp_dir']:
-              with patch.object(Script, 'install_packages') as mocks_dict['install_packages']:
-                with patch('resource_management.libraries.functions.get_kinit_path', return_value=kinit_path_local) as mocks_dict['get_kinit_path']:
-                  with patch.object(platform, 'linux_distribution', return_value=os_type) as mocks_dict['linux_distribution']:
-                    with patch.object(os, "environ", new=os_env) as mocks_dict['environ']:
-                      method(RMFTestCase.env)
-                      
+    if try_install:
+      with Environment(basedir, test_mode=True) as RMFTestCase.env:
+        with patch('resource_management.core.shell.checked_call', side_effect=checked_call_mocks) as mocks_dict['checked_call']:
+          with patch('resource_management.core.shell.call', side_effect=call_mocks) as mocks_dict['call']:
+            with patch.object(Script, 'get_config', return_value=self.config_dict) as mocks_dict['get_config']: # mocking configurations
+              with patch.object(Script, 'get_tmp_dir', return_value="/tmp") as mocks_dict['get_tmp_dir']:
+                with patch.object(Script, 'install_packages') as mocks_dict['install_packages']:
+                  with patch('resource_management.libraries.functions.get_kinit_path', return_value=kinit_path_local) as mocks_dict['get_kinit_path']:
+                    with patch.object(platform, 'linux_distribution', return_value=os_type) as mocks_dict['linux_distribution']:
+                      with patch.object(os, "environ", new=os_env) as mocks_dict['environ']:
+                        method(RMFTestCase.env)
+    else:
+      with Environment(basedir, test_mode=True) as RMFTestCase.env:
+        with patch('resource_management.core.shell.checked_call', side_effect=checked_call_mocks) as mocks_dict['checked_call']:
+          with patch('resource_management.core.shell.call', side_effect=call_mocks) as mocks_dict['call']:
+            with patch.object(Script, 'get_config', return_value=self.config_dict) as mocks_dict['get_config']: # mocking configurations
+              with patch.object(Script, 'get_tmp_dir', return_value="/tmp") as mocks_dict['get_tmp_dir']:
+                  with patch('resource_management.libraries.functions.get_kinit_path', return_value=kinit_path_local) as mocks_dict['get_kinit_path']:
+                    with patch.object(platform, 'linux_distribution', return_value=os_type) as mocks_dict['linux_distribution']:
+                      with patch.object(os, "environ", new=os_env) as mocks_dict['environ']:
+                        method(RMFTestCase.env)
+
     sys.path.remove(scriptsdir)
   
   def getConfig(self):
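
The two with-chains above differ only in the optional install_packages patch. A hedged alternative (not part of this commit) that avoids the duplication by starting the patchers explicitly; the mocks_dict wiring is omitted for brevity.

  # Sketch only: start/stop mock patchers by hand instead of nesting withs.
  patchers = [
      patch('resource_management.core.shell.checked_call', side_effect=checked_call_mocks),
      patch('resource_management.core.shell.call', side_effect=call_mocks),
      patch.object(Script, 'get_config', return_value=self.config_dict),
      patch.object(Script, 'get_tmp_dir', return_value="/tmp"),
      patch('resource_management.libraries.functions.get_kinit_path', return_value=kinit_path_local),
      patch.object(platform, 'linux_distribution', return_value=os_type),
      patch.object(os, 'environ', new=os_env),
  ]
  if try_install:
      patchers.append(patch.object(Script, 'install_packages'))
  for p in patchers:
      p.start()
  try:
      with Environment(basedir, test_mode=True) as RMFTestCase.env:
          method(RMFTestCase.env)
  finally:
      for p in patchers:
          p.stop()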

Some files were not shown because too many files changed in this diff