
Revert "AMBARI-16171. Changes to Phoenix QueryServer Kerberos configuration (Josh Elser via rlevas)"

This reverts commit 1c53b030a9e859d9d28bde955757e041065b54cb.
Sumit Mohanty · 9 years ago · commit 120b0b94f8

ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java  (+0, -43)

@@ -29,14 +29,12 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
-import java.util.TreeMap;
 
 import javax.persistence.EntityManager;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 
 import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.configuration.Configuration.DatabaseType;
 import org.apache.ambari.server.controller.AmbariManagementController;
@@ -51,10 +49,7 @@ import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.ServiceInfo;
-import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.kerberos.AbstractKerberosDescriptorContainer;
-import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
-import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
 import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
 import org.apache.ambari.server.utils.VersionUtils;
@@ -630,45 +625,7 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
     }
   }
 
-  /**
-   * Retrieve the composite Kerberos Descriptor.
-   * <p>
-   * The composite Kerberos Descriptor is the cluster's stack-specific Kerberos Descriptor overlaid
-   * with changes specified by the user via the cluster's Kerberos Descriptor artifact.
-   *
-   * @param cluster the relevant cluster
-   * @return the composite Kerberos Descriptor
-   * @throws AmbariException
-   */
-  protected KerberosDescriptor getKerberosDescriptor(Cluster cluster) throws AmbariException {
-    // Get the Stack-defined Kerberos Descriptor (aka default Kerberos Descriptor)
-    AmbariMetaInfo ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
-    StackId stackId = cluster.getCurrentStackVersion();
-    KerberosDescriptor defaultDescriptor = ambariMetaInfo.getKerberosDescriptor(stackId.getStackName(), stackId.getStackVersion());
 
-    // Get the User-set Kerberos Descriptor
-    ArtifactDAO artifactDAO = injector.getInstance(ArtifactDAO.class);
-    KerberosDescriptor artifactDescriptor = null;
-    ArtifactEntity artifactEntity = artifactDAO.findByNameAndForeignKeys("kerberos_descriptor",
-        new TreeMap<String, String>(Collections.singletonMap("cluster", String.valueOf(cluster.getClusterId()))));
-    if (artifactEntity != null) {
-      Map<String, Object> data = artifactEntity.getArtifactData();
-
-      if (data != null) {
-        artifactDescriptor = new KerberosDescriptorFactory().createInstance(data);
-      }
-    }
-
-    // Calculate and return the composite Kerberos Descriptor
-    if (defaultDescriptor == null) {
-      return artifactDescriptor;
-    } else if (artifactDescriptor == null) {
-      return defaultDescriptor;
-    } else {
-      defaultDescriptor.update(artifactDescriptor);
-      return defaultDescriptor;
-    }
-  }
 
   /**
    * Update the specified Kerberos Descriptor artifact to conform to the new structure.

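The helper removed above computed a "composite" descriptor by overlaying the user's cluster-level Kerberos Descriptor artifact onto the stack default. A minimal sketch of that merge decision, using only the update(...) semantics visible in the deleted lines (the method name compositeOf is illustrative, not part of the remaining code):

    static KerberosDescriptor compositeOf(KerberosDescriptor stackDefault,
                                          KerberosDescriptor userArtifact) {
      // Either side may be absent; if only one exists, it wins outright.
      if (stackDefault == null) {
        return userArtifact;
      }
      if (userArtifact != null) {
        // Overlay the user's changes onto the stack-defined default, in place.
        stackDefault.update(userArtifact);
      }
      return stackDefault;
    }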
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java  (+0, -153)

@@ -42,7 +42,6 @@ import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
-import org.apache.ambari.server.orm.dao.ArtifactDAO;
 import org.apache.ambari.server.orm.dao.ClusterDAO;
 import org.apache.ambari.server.orm.dao.PermissionDAO;
 import org.apache.ambari.server.orm.dao.PrincipalDAO;
@@ -53,7 +52,6 @@ import org.apache.ambari.server.orm.dao.RoleAuthorizationDAO;
 import org.apache.ambari.server.orm.dao.UserDAO;
 import org.apache.ambari.server.orm.dao.WidgetDAO;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
-import org.apache.ambari.server.orm.entities.ArtifactEntity;
 import org.apache.ambari.server.orm.entities.ClusterEntity;
 import org.apache.ambari.server.orm.entities.PermissionEntity;
 import org.apache.ambari.server.orm.entities.PrincipalEntity;
@@ -76,18 +74,9 @@ import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.State;
-import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
-import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
 import org.apache.ambari.server.state.stack.WidgetLayout;
 import org.apache.ambari.server.state.stack.WidgetLayoutInfo;
-import org.apache.ambari.server.utils.VersionUtils;
 import org.apache.ambari.view.ClusterType;
-import org.apache.ambari.server.state.SecurityType;
-import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
-import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
-import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
-import org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor;
-import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -149,8 +138,6 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   public static final String SHORT_URL_COLUMN = "short_url";
   protected static final String CLUSTER_VERSION_TABLE = "cluster_version";
   protected static final String HOST_VERSION_TABLE = "host_version";
-  protected static final String PHOENIX_QUERY_SERVER_PRINCIPAL_KEY = "phoenix.queryserver.kerberos.principal";
-  protected static final String PHOENIX_QUERY_SERVER_KEYTAB_KEY = "phoenix.queryserver.keytab.file";
 
   private static final String OOZIE_ENV_CONFIG = "oozie-env";
   private static final String HIVE_ENV_CONFIG = "hive-env";
@@ -160,7 +147,6 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   public static final String URL_ID_COLUMN = "url_id";
   private static final String PRINCIPAL_TYPE_TABLE = "adminprincipaltype";
   private static final String PRINCIPAL_TABLE = "adminprincipal";
-  protected static final String HBASE_SITE_CONFIG = "hbase-site";
 
   private static final Map<String, Integer> ROLE_ORDER;
 
@@ -341,8 +327,6 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
     updateHostRoleCommandTableDML();
     updateKerberosConfigs();
     updateYarnEnv();
-    updatePhoenixConfigs();
-    updateKerberosDescriptorArtifacts();
     removeHiveOozieDBConnectionConfigs();
     updateClustersAndHostsVersionStateTableDML();
     removeStandardDeviationAlerts();
@@ -2005,69 +1989,6 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
     }
   }
 
-  /**
-   * {@inheritDoc}
-   */
-  @Override
-  protected void updateKerberosDescriptorArtifact(ArtifactDAO artifactDAO, ArtifactEntity artifactEntity) throws AmbariException {
-    if (artifactEntity != null) {
-      Map<String, Object> data = artifactEntity.getArtifactData();
-
-      if (data != null) {
-        final KerberosDescriptor kerberosDescriptor = new KerberosDescriptorFactory().createInstance(data);
-
-        if (kerberosDescriptor != null) {
-          // Get the service that needs to be updated
-          KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService("HBASE");
-
-          if(serviceDescriptor != null) {
-            KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent("PHOENIX_QUERY_SERVER");
-
-            if (componentDescriptor != null) {
-              // Get the identity that needs to be updated
-              KerberosIdentityDescriptor origIdentityDescriptor = componentDescriptor.getIdentity("hbase_queryserver_hbase");
-
-              if (origIdentityDescriptor != null) {
-
-                // Create the new principal descriptor
-                KerberosPrincipalDescriptor origPrincipalDescriptor = origIdentityDescriptor.getPrincipalDescriptor();
-                KerberosPrincipalDescriptor newPrincipalDescriptor = new KerberosPrincipalDescriptor(
-                    null,
-                    null,
-                    (origPrincipalDescriptor == null)
-                        ? "hbase-site/phoenix.queryserver.kerberos.principal"
-                        : origPrincipalDescriptor.getConfiguration(),
-                    null);
-
-                // Create the new keytab descriptor
-                KerberosKeytabDescriptor origKeytabDescriptor = origIdentityDescriptor.getKeytabDescriptor();
-                KerberosKeytabDescriptor newKeytabDescriptor = new KerberosKeytabDescriptor(
-                    null,
-                    null,
-                    null,
-                    null,
-                    null,
-                    (origKeytabDescriptor == null)
-                        ? "hbase-site/phoenix.queryserver.keytab.file"
-                        : origKeytabDescriptor.getConfiguration(),
-                    false);
-
-                // Remove the old identity
-                componentDescriptor.removeIdentity("hbase_queryserver_hbase");
-
-                // Add the new identity
-                componentDescriptor.putIdentity(new KerberosIdentityDescriptor("/spnego", newPrincipalDescriptor, newKeytabDescriptor));
-
-                artifactEntity.setArtifactData(kerberosDescriptor.toMap());
-                artifactDAO.merge(artifactEntity);
-              }
-            }
-          }
-        }
-      }
-    }
-  }
-
   /**
    * Given a {@link ResourceEntity}, attempts to find the relevant cluster's name.
    *
@@ -2181,80 +2102,6 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
     }
   }
 
-  /**
-   * @return True if the stack is >=HDP-2.5, false otherwise.
-   */
-  protected boolean isAtLeastHdp25(StackId stackId) {
-    if (null == stackId) {
-      return false;
-    }
-
-    try {
-      return stackId.compareTo(new StackId("HDP-2.5")) >= 0;
-    } catch (Exception e) {
-      // Different stack names throw an exception.
-      return false;
-    }
-  }
-
-  /**
-   * Update Phoenix Query Server Kerberos configurations. Ambari 2.4 alters the Phoenix Query Server to
-   * support SPNEGO authentication, which requires the "HTTP/_HOST" principal and corresponding
-   * keytab file instead of the generic HBase service principal and keytab it previously used.
-   */
-  protected void updatePhoenixConfigs() throws AmbariException {
-    final AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
-    final Clusters clusters = controller.getClusters();
-
-    if (null != clusters) {
-      Map<String, Cluster> clusterMap = clusters.getClusters();
-
-      if (null != clusterMap && !clusterMap.isEmpty()) {
-        for (final Cluster cluster : clusterMap.values()) {
-          Set<String> installedServices = cluster.getServices().keySet();
-          StackId stackId = cluster.getCurrentStackVersion();
-
-          // HBase is installed and Kerberos is enabled
-          if (installedServices.contains("HBASE") && SecurityType.KERBEROS == cluster.getSecurityType() && isAtLeastHdp25(stackId)) {
-            Config hbaseSite = cluster.getDesiredConfigByType(HBASE_SITE_CONFIG);
-            if (null != hbaseSite) {
-              Map<String, String> hbaseSiteProperties = hbaseSite.getProperties();
-              // Get Phoenix Query Server kerberos config properties
-              String pqsKrbPrincipal = hbaseSiteProperties.get(PHOENIX_QUERY_SERVER_PRINCIPAL_KEY);
-              String pqsKrbKeytab = hbaseSiteProperties.get(PHOENIX_QUERY_SERVER_KEYTAB_KEY);
-
-              // Principal and Keytab are set
-              if (null != pqsKrbPrincipal && null != pqsKrbKeytab) {
-                final Map<String, String> updatedKerberosProperties = new HashMap<>();
-                final KerberosDescriptor defaultDescriptor = getKerberosDescriptor(cluster);
-
-                KerberosIdentityDescriptor spnegoDescriptor = defaultDescriptor.getIdentity("spnego");
-                if (null != spnegoDescriptor) {
-                  // Add the SPNEGO config for the principal
-                  KerberosPrincipalDescriptor principalDescriptor = spnegoDescriptor.getPrincipalDescriptor();
-                  if (null != principalDescriptor) {
-                    updatedKerberosProperties.put(PHOENIX_QUERY_SERVER_PRINCIPAL_KEY, principalDescriptor.getValue());
-                  }
-
-                  // Add the SPNEGO config for the keytab
-                  KerberosKeytabDescriptor keytabDescriptor = spnegoDescriptor.getKeytabDescriptor();
-                  if (null != keytabDescriptor) {
-                    updatedKerberosProperties.put(PHOENIX_QUERY_SERVER_KEYTAB_KEY, keytabDescriptor.getFile());
-                  }
-
-                  // Update the configuration if we changed anything
-                  if (!updatedKerberosProperties.isEmpty()) {
-                    updateConfigurationProperties(HBASE_SITE_CONFIG, updatedKerberosProperties, true, false);
-                  }
-                }
-              }
-            }
-          }
-        }
-      }
-    }
-  }
-
   /**
    *  Update properties with name
    *  yarn.timeline-server.url to yarn.ats.url

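Together, the two methods deleted above gated the migration on an HDP-2.5+ stack with Kerberos enabled, then rewrote two hbase-site keys from the HBase service credentials to the SPNEGO ones. A compact sketch of that rewrite, assuming a resolved spnegoIdentity as exercised in the deleted test further below; the cluster iteration and null-guard plumbing is elided:

    // Build the replacement hbase-site entries; the removed code applied them
    // only when both original Phoenix Query Server properties were present.
    Map<String, String> updated = new HashMap<>();
    KerberosPrincipalDescriptor principal = spnegoIdentity.getPrincipalDescriptor();
    if (principal != null) {
      updated.put("phoenix.queryserver.kerberos.principal", principal.getValue());
    }
    KerberosKeytabDescriptor keytab = spnegoIdentity.getKeytabDescriptor();
    if (keytab != null) {
      updated.put("phoenix.queryserver.keytab.file", keytab.getFile());
    }
    if (!updated.isEmpty()) {
      updateConfigurationProperties("hbase-site", updated, true, false);
    }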
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/kerberos.json  (+14, -7)

@@ -42,11 +42,6 @@
             "hbase.coprocessor.regionserver.classes": "{{hbase_coprocessor_regionserver_classes}}",
             "hbase.bulkload.staging.dir": "/apps/hbase/staging"
           }
-        },
-        {
-          "core-site": {
-            "hadoop.proxyuser.HTTP.hosts": "${clusterHostInfo/phoenix_query_server_hosts}"
-          }
         }
       ],
       "components": [
@@ -109,11 +104,23 @@
           "name": "PHOENIX_QUERY_SERVER",
           "identities": [
             {
-              "name": "/spnego",
+              "name": "hbase_queryserver_hbase",
               "principal": {
-                "configuration": "hbase-site/phoenix.queryserver.kerberos.principal"
+                "value": "hbase/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "hbase-site/phoenix.queryserver.kerberos.principal",
+                "local_username": "${hbase-env/hbase_user}"
               },
               "keytab": {
+                "file": "${keytab_dir}/hbase.service.keytab",
+                "owner": {
+                  "name": "${hbase-env/hbase_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
                 "configuration": "hbase-site/phoenix.queryserver.keytab.file"
               }
             }

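With the descriptor change reverted, PHOENIX_QUERY_SERVER is pinned back to the HBase service credentials instead of referencing the shared /spnego identity. Assuming the variable values used in the deleted test below (realm EXAMPLE.COM, keytabs under /etc/security/keytabs), the restored identity renders to these hbase-site properties:

    // Illustrative rendering only; ${realm} and ${keytab_dir} resolve per
    // cluster when Kerberos is enabled.
    Map<String, String> hbaseSite = new HashMap<>();
    hbaseSite.put("phoenix.queryserver.kerberos.principal", "hbase/_HOST@EXAMPLE.COM");
    hbaseSite.put("phoenix.queryserver.keytab.file", "/etc/security/keytabs/hbase.service.keytab");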
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java  (+1, -92)

@@ -25,7 +25,6 @@ import junit.framework.Assert;
 
 import static org.easymock.EasyMock.*;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertNull;
 
@@ -81,15 +80,10 @@ import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
-import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
-import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
-import org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor;
-import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.apache.commons.io.FileUtils;
 import org.easymock.Capture;
@@ -107,6 +101,7 @@ import com.google.inject.Injector;
 import com.google.inject.Module;
 import com.google.inject.Provider;
 
+import junit.framework.Assert;
 import org.junit.rules.TemporaryFolder;
 
 public class UpgradeCatalog240Test {
@@ -463,8 +458,6 @@ public class UpgradeCatalog240Test {
     Method updateClusterInheritedPermissionsConfig = UpgradeCatalog240.class.getDeclaredMethod("updateClusterInheritedPermissionsConfig");
     Method createRolePrincipals = UpgradeCatalog240.class.getDeclaredMethod("createRolePrincipals");
     Method updateHDFSWidget = UpgradeCatalog240.class.getDeclaredMethod("updateHDFSWidgetDefinition");
-    Method updatePhoenixConfigs = UpgradeCatalog240.class.getDeclaredMethod("updatePhoenixConfigs");
-    Method updateKerberosDescriptorArtifacts = AbstractUpgradeCatalog.class.getDeclaredMethod("updateKerberosDescriptorArtifacts");
 
     Capture<String> capturedStatements = newCapture(CaptureType.ALL);
 
@@ -494,8 +487,6 @@ public class UpgradeCatalog240Test {
             .addMockedMethod(updateClusterInheritedPermissionsConfig)
             .addMockedMethod(createRolePrincipals)
             .addMockedMethod(updateHDFSWidget)
-            .addMockedMethod(updatePhoenixConfigs)
-            .addMockedMethod(updateKerberosDescriptorArtifacts)
             .createMock();
 
     Field field = AbstractUpgradeCatalog.class.getDeclaredField("dbAccessor");
@@ -520,8 +511,6 @@ public class UpgradeCatalog240Test {
     upgradeCatalog240.createRolePrincipals();
     upgradeCatalog240.updateClusterInheritedPermissionsConfig();
     upgradeCatalog240.updateHDFSWidgetDefinition();
-    upgradeCatalog240.updatePhoenixConfigs();
-    upgradeCatalog240.updateKerberosDescriptorArtifacts();
 
     replay(upgradeCatalog240, dbAccessor);
 
@@ -1527,86 +1516,6 @@ public class UpgradeCatalog240Test {
     verify(clusters, cluster, controller, widgetDAO, widgetEntity, stackInfo, serviceInfo);
   }
 
-  @Test
-  public void testPhoenixQueryServerKerberosUpdateConfigs() throws Exception{
-    // Tests that we switch from the HBase service principal and keytab to the SPNEGO service principal and keytab.
-    final String spnegoPrincipal = "HTTP/_HOST@EXAMPLE.COM";
-    final String spnegoKeytab = "/etc/security/keytabs/spnego.service.keytab";
-    final Map<String, String> oldPqsProperties = new HashMap<>();
-    oldPqsProperties.put("phoenix.queryserver.kerberos.principal", "hbase/_HOST@EXAMPLE.COM");
-    oldPqsProperties.put("phoenix.queryserver.keytab.file", "/etc/security/keytabs/hbase.service.keytab");
-    final Map<String, String> newPqsProperties = new HashMap<String, String>();
-    newPqsProperties.put("phoenix.queryserver.kerberos.principal", spnegoPrincipal);
-    newPqsProperties.put("phoenix.queryserver.keytab.file", spnegoKeytab);
-
-    final EasyMockSupport easyMockSupport = new EasyMockSupport();
-
-    // Set up all of the injected mocks to trigger the upgrade scenario
-    AmbariManagementController controller = easyMockSupport.createNiceMock(AmbariManagementController.class);
-    KerberosDescriptor kerberosDescriptor = easyMockSupport.createNiceMock(KerberosDescriptor.class);
-    KerberosIdentityDescriptor kerberosIdentityDescriptor = easyMockSupport.createNiceMock(KerberosIdentityDescriptor.class);
-    KerberosPrincipalDescriptor principalDescriptor = easyMockSupport.createNiceMock(KerberosPrincipalDescriptor.class);
-    KerberosKeytabDescriptor keytabDescriptor = easyMockSupport.createNiceMock(KerberosKeytabDescriptor.class);
-    Clusters clusters = easyMockSupport.createNiceMock(Clusters.class);
-    final Cluster cluster = easyMockSupport.createNiceMock(Cluster.class);
-    Config mockHbaseSite = easyMockSupport.createNiceMock(Config.class);
-    // HBase and Kerberos are both "installed"
-    final Map<String, Service> mockServices = new HashMap<>();
-    mockServices.put("HBASE", null);
-    final StackId stackId = new StackId("HDP-2.5");
-
-    expect(controller.getClusters()).andReturn(clusters).once();
-    expect(clusters.getClusters()).andReturn(Collections.singletonMap("normal", cluster)).once();
-    expect(cluster.getCurrentStackVersion()).andReturn(stackId);
-    expect(cluster.getServices()).andReturn(mockServices).once();
-    expect(cluster.getSecurityType()).andReturn(SecurityType.KERBEROS).anyTimes();
-    expect(cluster.getDesiredConfigByType(UpgradeCatalog240.HBASE_SITE_CONFIG)).andReturn(mockHbaseSite).atLeastOnce();
-    expect(mockHbaseSite.getProperties()).andReturn(oldPqsProperties).anyTimes();
-
-    // Stub out the KerberosDescriptor down to the Principal and Keytab Descriptors
-    expect(kerberosDescriptor.getIdentity("spnego")).andReturn(kerberosIdentityDescriptor).once();
-    expect(kerberosIdentityDescriptor.getPrincipalDescriptor()).andReturn(principalDescriptor).anyTimes();
-    expect(kerberosIdentityDescriptor.getKeytabDescriptor()).andReturn(keytabDescriptor).anyTimes();
-    expect(principalDescriptor.getValue()).andReturn(spnegoPrincipal).anyTimes();
-    expect(keytabDescriptor.getFile()).andReturn(spnegoKeytab).anyTimes();
-
-    Injector injector = easyMockSupport.createNiceMock(Injector.class);
-    expect(injector.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
-
-    easyMockSupport.replayAll();
-
-    UpgradeCatalog240 upgradeCatalog240 = createMockBuilder(UpgradeCatalog240.class)
-        .withConstructor(Injector.class)
-        .withArgs(injector)
-        .addMockedMethod("updateConfigurationProperties", String.class, Map.class, boolean.class, boolean.class)
-        .addMockedMethod("getKerberosDescriptor", Cluster.class)
-        .createMock();
-
-    expect(upgradeCatalog240.getKerberosDescriptor(cluster)).andReturn(kerberosDescriptor).once();
-
-    upgradeCatalog240.updateConfigurationProperties(UpgradeCatalog240.HBASE_SITE_CONFIG, newPqsProperties, true, false);
-    expectLastCall().once();
-
-    replay(upgradeCatalog240);
-
-    // Expected that we see the configuration updates fire
-    upgradeCatalog240.updatePhoenixConfigs();
-    easyMockSupport.verifyAll();
-  }
-
-  @Test
-  public void testStackIdVersion() {
-    final EasyMockSupport easyMockSupport = new EasyMockSupport();
-    Injector injector = easyMockSupport.createNiceMock(Injector.class);
-    UpgradeCatalog240 upgradeCatalog240 = new UpgradeCatalog240(injector);
-
-    assertFalse(upgradeCatalog240.isAtLeastHdp25(new StackId("HDP-2.3")));
-    assertFalse(upgradeCatalog240.isAtLeastHdp25(new StackId("HDP-2.4")));
-    assertTrue(upgradeCatalog240.isAtLeastHdp25(new StackId("HDP-2.5")));
-    assertTrue(upgradeCatalog240.isAtLeastHdp25(new StackId("HDP-2.6")));
-    assertFalse(upgradeCatalog240.isAtLeastHdp25(new StackId("SOMETHINGELSE-1.4")));
-  }
-
   @Test
   public void testUpdateAmsGrafanaIniContent() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException
   {