
AMBARI-9780. RU: upgrade checks need changes, update 1 (ncole)

Nate Cole 10 years ago
parent commit 08cdfdc66b

+ 3 - 1
ambari-server/src/main/java/org/apache/ambari/server/checks/AbstractCheckDescriptor.java

@@ -46,6 +46,8 @@ public abstract class AbstractCheckDescriptor {
 
   private static final Logger LOG = LoggerFactory.getLogger(AbstractCheckDescriptor.class);
 
+  protected static final String DEFAULT = "default";
+
   @Inject
   Provider<Clusters> clustersProvider;
 
@@ -110,7 +112,7 @@ public abstract class AbstractCheckDescriptor {
    * @return the failure string
    */
   protected String getFailReason(PrerequisiteCheck prerequisiteCheck, PrereqCheckRequest request) {
-    return getFailReason("default", prerequisiteCheck, request);
+    return getFailReason(DEFAULT, prerequisiteCheck, request);
   }
 
   /**
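
For context: the new DEFAULT constant replaces the "default" magic string on both sides of the lookup, so the key passed to getFailReason and the key registered in the CheckDescription maps below can no longer drift apart silently. A minimal sketch of the lookup, with a plain map standing in for a CheckDescription entry (simplified and hypothetical; the real getFailReason also interpolates request data into the template):

    // Illustrative sketch only -- a simplified stand-in for the real lookup.
    import java.util.HashMap;
    import java.util.Map;

    class FailReasonSketch {
      // Mirrors AbstractCheckDescriptor.DEFAULT introduced by this patch.
      static final String DEFAULT = "default";

      public static void main(String[] args) {
        // Mirrors one entry of a CheckDescription fail-reason map.
        Map<String, String> failReasons = new HashMap<String, String>();
        failReasons.put(DEFAULT,
            "The following hosts must be heartbeating to the Ambari Server: {{fails}}.");

        // Checks needing the generic message pass DEFAULT; checks with
        // extra failure modes pass their own KEY_* constant instead.
        System.out.println(failReasons.get(DEFAULT));
      }
    }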

+ 20 - 20
ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java

@@ -31,65 +31,65 @@ public enum CheckDescription {
   HOSTS_HEARTBEAT(PrereqCheckType.HOST,
       "All hosts must be heartbeating with the Ambari Server unless they are in Maintenance Mode",
       new HashMap<String, String>() {{
-        put("default",
+        put(AbstractCheckDescriptor.DEFAULT,
             "The following hosts must be heartbeating to the Ambari Server: {{fails}}.");
       }}),
 
   HOSTS_MASTER_MAINTENANCE(PrereqCheckType.HOST,
       "Hosts in Maintenance Mode must not have any master components",
       new HashMap<String, String>() {{
-        put("default",
+        put(AbstractCheckDescriptor.DEFAULT,
           "The following hosts must not be in in Maintenance Mode since they host Master components: {{fails}}.");
-        put("no_upgrade_name",
+        put(HostsMasterMaintenanceCheck.KEY_NO_UPGRADE_NAME,
           "Could not find suitable upgrade pack for %s %s to version {{version}}.");
-        put("no_upgrade_pack",
+        put(HostsMasterMaintenanceCheck.KEY_NO_UPGRADE_PACK,
           "Could not find upgrade pack named %s.");
       }}),
 
   HOSTS_REPOSITORY_VERSION(PrereqCheckType.HOST,
       "All hosts should have target version installed",
       new HashMap<String, String>() {{
-        put("default",
+        put(AbstractCheckDescriptor.DEFAULT,
           "The following hosts must have version {{version}} installed: {{fails}}.");
-        put("no_repo_version",
+        put(HostsRepositoryVersionCheck.KEY_NO_REPO_VERSION,
           "Repository version {{version}} does not exist.");
       }}),
 
   SECONDARY_NAMENODE_MUST_BE_DELETED(PrereqCheckType.SERVICE,
       "The SNameNode component must be deleted from all hosts",
       new HashMap<String, String>() {{
-        put("default", "The SNameNode component must be deleted from host: {{fails}}.");
+        put(AbstractCheckDescriptor.DEFAULT, "The SNameNode component must be deleted from host: {{fails}}.");
       }}),
 
   SERVICES_DECOMMISSION(PrereqCheckType.SERVICE,
       "Services should not have components in decommission state",
       new HashMap<String, String>() {{
-        put("default",
+        put(AbstractCheckDescriptor.DEFAULT,
           "The following Services must not have components in decommissioned or decommissioning state: {{fails}}.");
       }}),
 
   SERVICES_MAINTENANCE_MODE(PrereqCheckType.SERVICE,
       "No services can be in Maintenance Mode",
       new HashMap<String, String>() {{
-        put("default",
+        put(AbstractCheckDescriptor.DEFAULT,
           "The following Services must not be in Maintenance Mode: {{fails}}.");
       }}),
 
   SERVICES_MR_DISTRIBUTED_CACHE(PrereqCheckType.SERVICE,
       "MapReduce should reference Hadoop libraries from the distributed cache in HDFS",
       new HashMap<String, String>() {{
-        put("app_classpath",
+        put(ServicesMapReduceDistributedCacheCheck.KEY_APP_CLASSPATH,
           "The mapred-site.xml property mapreduce.application.classpath should be set.");
-        put("framework_path",
+        put(ServicesMapReduceDistributedCacheCheck.KEY_FRAMEWORK_PATH,
           "The mapred-site.xml property mapreduce.application.framework.path should be set.");
-        put("not_dfs",
+        put(ServicesMapReduceDistributedCacheCheck.KEY_NOT_DFS,
           "The mapred-site.xml property mapreduce.application.framework.path or the core-site.xml property fs.defaultFS should point to *dfs:/ url.");
       }}),
 
   SERVICES_NAMENODE_HA(PrereqCheckType.SERVICE,
       "NameNode High Availability must  be enabled",
       new HashMap<String, String>() {{
-        put("default",
+        put(AbstractCheckDescriptor.DEFAULT,
           "NameNode High Availability is not enabled. Verify that dfs.nameservices property is present in hdfs-site.xml.");
       }}),
 
@@ -97,29 +97,29 @@ public enum CheckDescription {
   SERVICES_TEZ_DISTRIBUTED_CACHE(PrereqCheckType.SERVICE,
       "Tez should reference Hadoop libraries from the distributed cache in HDFS",
       new HashMap<String, String>() {{
-        put("tez_lib_uri_missing",
+        put(ServicesTezDistributedCacheCheck.KEY_LIB_URI_MISSING,
           "The tez-site.xml property tez.lib.uris should be set.");
-        put("tez_use_hadoop_libs",
+        put(ServicesTezDistributedCacheCheck.KEY_USE_HADOOP_LIBS,
           "The tez-site.xml property tez.use.cluster-hadoop-libs should be set.");
-        put("lib_not_dfs",
+        put(ServicesTezDistributedCacheCheck.KEY_LIB_NOT_DFS,
           "The tez-site.xml property tez.lib.uris or the core-site.xml property fs.defaultFS should point to *dfs:/ url.");
-        put("lib_not_targz",
+        put(ServicesTezDistributedCacheCheck.KEY_LIB_NOT_TARGZ,
           "The tez-site.xml property tez.lib.uris should point to tar.gz file.");
-        put("tez_use_hadoop_libs_false",
+        put(ServicesTezDistributedCacheCheck.KEY_USE_HADOOP_LIBS_FALSE,
           "The tez-site.xml property tez.use.cluster.hadoop-libs should be set to false.");
       }}),
 
   SERVICES_UP(PrereqCheckType.SERVICE,
       "All services must be started",
       new HashMap<String, String>() {{
-        put("default",
+        put(AbstractCheckDescriptor.DEFAULT,
           "The following Services must be started: {{fails}}");
       }}),
 
   SERVICES_YARN_WP(PrereqCheckType.SERVICE,
       "YARN work preserving restart should be enabled",
       new HashMap<String, String>() {{
-        put("default",
+        put(AbstractCheckDescriptor.DEFAULT,
           "YARN should have work preserving restart enabled. The yarn-site.xml property yarn.resourcemanager.work-preserving-recovery.enabled property should be set to true.");
       }});
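
A note on the new HashMap<String, String>() {{ put(...); }} construct used throughout this enum: it is double-brace initialization, an anonymous HashMap subclass whose instance-initializer block runs the put() calls. A standalone sketch of the idiom and its plain equivalent:

    // Illustrative sketch only.
    import java.util.HashMap;
    import java.util.Map;

    class DoubleBraceSketch {
      public static void main(String[] args) {
        // Double-brace form, as in CheckDescription: the outer braces open
        // an anonymous subclass, the inner braces an initializer block.
        Map<String, String> braced = new HashMap<String, String>() {{
          put("default", "message");
        }};

        // The plain equivalent, shown for comparison.
        Map<String, String> plain = new HashMap<String, String>();
        plain.put("default", "message");

        System.out.println(braced.equals(plain)); // true
      }
    }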
 

+ 5 - 2
ambari-server/src/main/java/org/apache/ambari/server/checks/HostsMasterMaintenanceCheck.java

@@ -39,6 +39,9 @@ import org.apache.ambari.server.state.stack.UpgradePack.ProcessingComponent;
  */
 public class HostsMasterMaintenanceCheck extends AbstractCheckDescriptor {
 
+  static final String KEY_NO_UPGRADE_NAME = "no_upgrade_name";
+  static final String KEY_NO_UPGRADE_PACK = "no_upgrade_pack";
+
   /**
    * Constructor.
    */
@@ -60,14 +63,14 @@ public class HostsMasterMaintenanceCheck extends AbstractCheckDescriptor {
     final String upgradePackName = repositoryVersionHelper.get().getUpgradePackageName(stackId.getStackName(), stackId.getStackVersion(), request.getRepositoryVersion());
     if (upgradePackName == null) {
       prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
-      String fail = getFailReason("no_upgrade_name", prerequisiteCheck, request);
+      String fail = getFailReason(KEY_NO_UPGRADE_NAME, prerequisiteCheck, request);
       prerequisiteCheck.setFailReason(String.format(fail, stackId.getStackName(), stackId.getStackVersion()));
       return;
     }
     final UpgradePack upgradePack = ambariMetaInfo.get().getUpgradePacks(stackId.getStackName(), stackId.getStackVersion()).get(upgradePackName);
     if (upgradePack == null) {
       prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
-      String fail = getFailReason("no_upgrade_pack", prerequisiteCheck, request);
+      String fail = getFailReason(KEY_NO_UPGRADE_PACK, prerequisiteCheck, request);
       prerequisiteCheck.setFailReason(String.format(fail, upgradePackName));
       return;
     }

+ 3 - 1
ambari-server/src/main/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheck.java

@@ -36,6 +36,8 @@ import org.apache.ambari.server.state.stack.PrerequisiteCheck;
  */
 public class HostsRepositoryVersionCheck extends AbstractCheckDescriptor {
 
+  static final String KEY_NO_REPO_VERSION = "no_repo_version";
+
   /**
    * Constructor.
    */
@@ -60,7 +62,7 @@ public class HostsRepositoryVersionCheck extends AbstractCheckDescriptor {
         final RepositoryVersionEntity repositoryVersion = repositoryVersionDaoProvider.get().findByStackAndVersion(stackId.getStackId(), request.getRepositoryVersion());
         if (repositoryVersion == null) {
           prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
-          prerequisiteCheck.setFailReason(getFailReason("no_repo_version",prerequisiteCheck, request));
+          prerequisiteCheck.setFailReason(getFailReason(KEY_NO_REPO_VERSION, prerequisiteCheck, request));
           prerequisiteCheck.getFailedOn().addAll(clusterHosts.keySet());
           return;
         }

+ 7 - 3
ambari-server/src/main/java/org/apache/ambari/server/checks/ServicesMapReduceDistributedCacheCheck.java

@@ -36,6 +36,10 @@ import org.apache.commons.lang.StringUtils;
  */
 public class ServicesMapReduceDistributedCacheCheck extends AbstractCheckDescriptor {
 
+  static final String KEY_APP_CLASSPATH = "app_classpath";
+  static final String KEY_FRAMEWORK_PATH = "framework_path";
+  static final String KEY_NOT_DFS = "not_dfs";
+
   @Override
   public boolean isApplicable(PrereqCheckRequest request)
     throws AmbariException {
@@ -79,11 +83,11 @@ public class ServicesMapReduceDistributedCacheCheck extends AbstractCheckDescrip
 
     List<String> errorMessages = new ArrayList<String>();
     if (applicationClasspath == null || applicationClasspath.isEmpty()) {
-      errorMessages.add(getFailReason("app_classpath", prerequisiteCheck, request));
+      errorMessages.add(getFailReason(KEY_APP_CLASSPATH, prerequisiteCheck, request));
     }
 
     if (frameworkPath == null || frameworkPath.isEmpty()) {
-      errorMessages.add(getFailReason("framework_path", prerequisiteCheck, request));
+      errorMessages.add(getFailReason(KEY_FRAMEWORK_PATH, prerequisiteCheck, request));
     }
 
     if (!errorMessages.isEmpty()) {
@@ -96,7 +100,7 @@ public class ServicesMapReduceDistributedCacheCheck extends AbstractCheckDescrip
     if (!frameworkPath.matches("^[^:]*dfs:.*") && (defaultFS == null || !defaultFS.matches("^[^:]*dfs:.*"))) {
       prerequisiteCheck.getFailedOn().add("MAPREDUCE2");
       prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
-      prerequisiteCheck.setFailReason(getFailReason("not_dfs", prerequisiteCheck, request));
+      prerequisiteCheck.setFailReason(getFailReason(KEY_NOT_DFS, prerequisiteCheck, request));
     }
   }
 }
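
The "^[^:]*dfs:.*" pattern in the last hunk (the Tez check below reuses it) accepts any URI whose scheme ends in dfs, so hdfs:// and other *dfs schemes pass while local and file URIs fail. A standalone sketch of its behavior (the paths are made up for illustration):

    // Illustrative sketch only: behavior of the "^[^:]*dfs:.*" pattern
    // shared by the MapReduce and Tez distributed-cache checks.
    public class DfsPatternSketch {
      public static void main(String[] args) {
        String p = "^[^:]*dfs:.*";
        System.out.println("hdfs://nn:8020/mr/framework.tar.gz".matches(p));  // true
        System.out.println("file:///opt/hadoop/framework.tar.gz".matches(p)); // false
        System.out.println("/local/path/framework.tar.gz".matches(p));        // false
      }
    }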

+ 11 - 5
ambari-server/src/main/java/org/apache/ambari/server/checks/ServicesTezDistributedCacheCheck.java

@@ -36,6 +36,12 @@ import org.apache.commons.lang.StringUtils;
  */
 public class ServicesTezDistributedCacheCheck extends AbstractCheckDescriptor {
 
+  static final String KEY_LIB_URI_MISSING = "tez_lib_uri_missing";
+  static final String KEY_USE_HADOOP_LIBS = "tez_use_hadoop_libs";
+  static final String KEY_LIB_NOT_DFS = "lib_not_dfs";
+  static final String KEY_LIB_NOT_TARGZ = "lib_not_targz";
+  static final String KEY_USE_HADOOP_LIBS_FALSE = "tez_use_hadoop_libs_false";
+
   @Override
   public boolean isApplicable(PrereqCheckRequest request)
     throws AmbariException {
@@ -79,11 +85,11 @@ public class ServicesTezDistributedCacheCheck extends AbstractCheckDescriptor {
 
     List<String> errorMessages = new ArrayList<String>();
     if (libUris == null || libUris.isEmpty()) {
-      errorMessages.add(getFailReason("tez_lib_uri_missing", prerequisiteCheck, request));
+      errorMessages.add(getFailReason(KEY_LIB_URI_MISSING, prerequisiteCheck, request));
     }
 
     if (useHadoopLibs == null || useHadoopLibs.isEmpty()) {
-      errorMessages.add(getFailReason("tez_use_hadoop_libs", prerequisiteCheck, request));
+      errorMessages.add(getFailReason(KEY_USE_HADOOP_LIBS, prerequisiteCheck, request));
     }
 
     if (!errorMessages.isEmpty()) {
@@ -94,15 +100,15 @@ public class ServicesTezDistributedCacheCheck extends AbstractCheckDescriptor {
     }
 
     if (!libUris.matches("^[^:]*dfs:.*") && (defaultFS == null || !defaultFS.matches("^[^:]*dfs:.*"))) {
-      errorMessages.add(getFailReason("lib_not_dfs", prerequisiteCheck, request));
+      errorMessages.add(getFailReason(KEY_LIB_NOT_DFS, prerequisiteCheck, request));
     }
 
     if (!libUris.contains("tar.gz")) {
-      errorMessages.add(getFailReason("lib_not_targz", prerequisiteCheck, request));
+      errorMessages.add(getFailReason(KEY_LIB_NOT_TARGZ, prerequisiteCheck, request));
     }
 
     if (Boolean.parseBoolean(useHadoopLibs)) {
-      errorMessages.add(getFailReason("tez_use_hadoop_libs_false", prerequisiteCheck, request));
+      errorMessages.add(getFailReason(KEY_USE_HADOOP_LIBS_FALSE, prerequisiteCheck, request));
     }
 
     if (!errorMessages.isEmpty()) {